diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8775e1464d0..f9c69fbf5d6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -117,7 +117,7 @@ For Eclipse, go to `Preferences->Java->Installed JREs` and add `-ea` to Please follow these formatting guidelines: * Java indent is 4 spaces -* Line width is 140 characters +* Line width is 100 characters * The rest is left to Java coding standards * Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. * Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them. diff --git a/build.gradle b/build.gradle index fd97470ec6c..09748ea1e8a 100644 --- a/build.gradle +++ b/build.gradle @@ -18,15 +18,17 @@ */ import java.nio.file.Path +import java.util.regex.Matcher import org.eclipse.jgit.lib.Repository import org.eclipse.jgit.lib.RepositoryBuilder import org.gradle.plugins.ide.eclipse.model.SourceFolder import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.VersionProperties // common maven publishing configuration subprojects { group = 'org.elasticsearch' - version = org.elasticsearch.gradle.VersionProperties.elasticsearch + version = VersionProperties.elasticsearch description = "Elasticsearch subproject ${project.path}" } @@ -59,12 +61,26 @@ configure(subprojects.findAll { it.projectDir.toPath().startsWith(rootPath) }) { } } +int prevMajor = Integer.parseInt(VersionProperties.elasticsearch.split('\\.')[0]) - 1 +String prevSnapshot = VersionProperties.elasticsearch.contains('alpha') ? '-SNAPSHOT' : '' +File versionFile = file('core/src/main/java/org/elasticsearch/Version.java') +List versionLines = versionFile.readLines('UTF-8') +int prevMinor = 0 +for (String line : versionLines) { + Matcher match = line =~ /\W+public static final Version V_${prevMajor}_(\d+)_.*/ + if (match.matches()) { + prevMinor = Math.max(Integer.parseInt(match.group(1)), prevMinor) + } +} + +// injecting groovy property variables into all projects allprojects { - // injecting groovy property variables into all projects project.ext { // for ide hacks... 
isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse') isIdea = System.getProperty("idea.active") != null || gradle.startParameter.taskNames.contains('idea') || gradle.startParameter.taskNames.contains('cleanIdea') + // for backcompat testing + bwcVersion = "${prevMajor}.${prevMinor}.0${prevSnapshot}" } } @@ -112,6 +128,7 @@ subprojects { "org.elasticsearch.client:transport:${version}": ':client:transport', "org.elasticsearch.test:framework:${version}": ':test:framework', "org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip', + "org.elasticsearch.distribution.zip:elasticsearch:${bwcVersion}": ':distribution:bwc-zip', "org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip', "org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar', "org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm', @@ -123,10 +140,12 @@ subprojects { "org.elasticsearch.plugin:lang-mustache-client:${version}": ':modules:lang-mustache', "org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator', ] - configurations.all { - resolutionStrategy.dependencySubstitution { DependencySubstitutions subs -> - projectSubstitutions.each { k,v -> - subs.substitute(subs.module(k)).with(subs.project(v)) + project.afterEvaluate { + configurations.all { + resolutionStrategy.dependencySubstitution { DependencySubstitutions subs -> + projectSubstitutions.each { k,v -> + subs.substitute(subs.module(k)).with(subs.project(v)) + } } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index ad1445759a1..dd9d1781ccd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -328,46 +328,15 @@ class BuildPlugin implements Plugin { return } - // check each dependency for any transitive deps + // fix deps incorrectly marked as runtime back to compile time deps + // see https://discuss.gradle.org/t/maven-publish-plugin-generated-pom-making-dependency-scope-runtime/7494/4 for (Node depNode : depsNodes.get(0).children()) { - String groupId = depNode.get('groupId').get(0).text() - String artifactId = depNode.get('artifactId').get(0).text() - String version = depNode.get('version').get(0).text() - - // fix deps incorrectly marked as runtime back to compile time deps - // see https://discuss.gradle.org/t/maven-publish-plugin-generated-pom-making-dependency-scope-runtime/7494/4 boolean isCompileDep = project.configurations.compile.allDependencies.find { dep -> dep.name == depNode.artifactId.text() } if (depNode.scope.text() == 'runtime' && isCompileDep) { depNode.scope*.value = 'compile' } - - // collect the transitive deps now that we know what this dependency is - String depConfig = transitiveDepConfigName(groupId, artifactId, version) - Configuration configuration = project.configurations.findByName(depConfig) - if (configuration == null) { - continue // we did not make this dep non-transitive - } - Set artifacts = configuration.resolvedConfiguration.resolvedArtifacts - if (artifacts.size() <= 1) { - // this dep has no transitive deps (or the only artifact is itself) - continue - } - - // we now know we have something to exclude, so add exclusions for all artifacts except the main one - 
Node exclusions = depNode.appendNode('exclusions') - for (ResolvedArtifact artifact : artifacts) { - ModuleVersionIdentifier moduleVersionIdentifier = artifact.moduleVersion.id; - String depGroupId = moduleVersionIdentifier.group - String depArtifactId = moduleVersionIdentifier.name - // add exclusions for all artifacts except the main one - if (depGroupId != groupId || depArtifactId != artifactId) { - Node exclusion = exclusions.appendNode('exclusion') - exclusion.appendNode('groupId', depGroupId) - exclusion.appendNode('artifactId', depArtifactId) - } - } } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index 353b8127545..1251be265da 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -39,6 +39,9 @@ class PluginPropertiesExtension { @Input String classname + @Input + boolean hasNativeController = false + /** Indicates whether the plugin jar should be made available for the transport client. */ @Input boolean hasClientJar = false diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index 94bc0ba3e75..91efe247a01 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -79,7 +79,8 @@ class PluginPropertiesTask extends Copy { 'version': stringSnap(extension.version), 'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch), 'javaVersion': project.targetCompatibility as String, - 'classname': extension.classname + 'classname': extension.classname, + 'hasNativeController': extension.hasNativeController ] } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index f5974d7c0f8..66574f5f289 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -217,7 +217,7 @@ class VagrantTestPlugin implements Plugin { // Now we iterate over dependencies of the bats configuration. When a project dependency is found, // we bring back its own archives, test files or test utils. 
project.afterEvaluate { - project.configurations.bats.dependencies.findAll {it.configuration == BATS }.each { d -> + project.configurations.bats.dependencies.findAll {it.targetConfiguration == BATS }.each { d -> if (d instanceof DefaultProjectDependency) { DefaultProjectDependency externalBatsDependency = (DefaultProjectDependency) d Project externalBatsProject = externalBatsDependency.dependencyProject diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index ab0a75a007a..c95ad03f9ac 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -605,7 +605,6 @@ - @@ -1565,14 +1564,12 @@ - - @@ -2307,7 +2304,6 @@ - @@ -2388,7 +2384,6 @@ - @@ -3009,7 +3004,6 @@ - @@ -3949,13 +3943,9 @@ - - - - diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt index a4666de4fa1..64ae6784f15 100644 --- a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt @@ -44,4 +44,13 @@ java.net.URLConnection#getInputStream() java.net.Socket#connect(java.net.SocketAddress) java.net.Socket#connect(java.net.SocketAddress, int) java.nio.channels.SocketChannel#open(java.net.SocketAddress) -java.nio.channels.SocketChannel#connect(java.net.SocketAddress) \ No newline at end of file +java.nio.channels.SocketChannel#connect(java.net.SocketAddress) + +# This method is misleading, and uses lenient boolean parsing under the hood. If you intend to parse +# a system property as a boolean, use +# org.elasticsearch.common.Booleans#parseBoolean(java.lang.String) on the result of +# java.lang.SystemProperty#getProperty(java.lang.String) instead. If you were not intending to parse +# a system property as a boolean, but instead parse a string to a boolean, use +# org.elasticsearch.common.Booleans#parseBoolean(java.lang.String) directly on the string. +@defaultMessage use org.elasticsearch.common.Booleans#parseBoolean(java.lang.String) +java.lang.Boolean#getBoolean(java.lang.String) diff --git a/buildSrc/src/main/resources/plugin-descriptor.properties b/buildSrc/src/main/resources/plugin-descriptor.properties index ebde46d326b..67c6ee39968 100644 --- a/buildSrc/src/main/resources/plugin-descriptor.properties +++ b/buildSrc/src/main/resources/plugin-descriptor.properties @@ -30,11 +30,15 @@ name=${name} # 'classname': the name of the class to load, fully-qualified. 
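As an aside on the forbidden-signatures entry above: java.lang.Boolean#getBoolean is banned because it does not parse its argument at all — it looks up a system property named by the argument and leniently compares that property's value to "true". A minimal sketch of the pitfall (the es.flag property name is hypothetical):

```java
public class GetBooleanPitfall {
    public static void main(String[] args) {
        // Boolean.getBoolean reads the *system property* named by its argument and
        // returns true only if that property's value equals "true" (ignoring case).
        System.out.println(Boolean.getBoolean("true"));    // false: no property named "true"

        System.setProperty("es.flag", "yes");
        System.out.println(Boolean.getBoolean("es.flag")); // false: "yes" is not "true"

        // To parse a string as a boolean, parse the string itself; the banned method
        // never sees the string value you meant. org.elasticsearch.common.Booleans
        // additionally rejects values other than "true"/"false".
        System.out.println(Boolean.parseBoolean("true"));  // true
    }
}
```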
classname=${classname} # -# 'java.version' version of java the code is built against +# 'java.version': version of java the code is built against # use the system property java.specification.version # version string must be a sequence of nonnegative decimal integers # separated by "."'s and may have leading zeros java.version=${javaVersion} # -# 'elasticsearch.version' version of elasticsearch compiled against +# 'elasticsearch.version': version of elasticsearch compiled against elasticsearch.version=${elasticsearchVersion} +### optional elements for plugins: +# +# 'has.native.controller': whether or not the plugin has a native controller +has.native.controller=${hasNativeController} diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 8938a0fdfab..cea96db283d 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,6 +1,6 @@ # When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy elasticsearch = 6.0.0-alpha1 -lucene = 6.5.0-snapshot-d00c5ca +lucene = 6.5.0 # optional dependencies spatial4j = 0.6 diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java index d39dfaa3e75..58b7df92272 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -79,9 +79,9 @@ public class RequestTests extends ESTestCase { } public void testDelete() throws IOException { - String index = randomAsciiOfLengthBetween(3, 10); - String type = randomAsciiOfLengthBetween(3, 10); - String id = randomAsciiOfLengthBetween(3, 10); + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); DeleteRequest deleteRequest = new DeleteRequest(index, type, id); Map expectedParams = new HashMap<>(); @@ -93,12 +93,12 @@ public class RequestTests extends ESTestCase { if (frequently()) { if (randomBoolean()) { - String routing = randomAsciiOfLengthBetween(3, 10); + String routing = randomAlphaOfLengthBetween(3, 10); deleteRequest.routing(routing); expectedParams.put("routing", routing); } if (randomBoolean()) { - String parent = randomAsciiOfLengthBetween(3, 10); + String parent = randomAlphaOfLengthBetween(3, 10); deleteRequest.parent(parent); expectedParams.put("parent", parent); } @@ -116,20 +116,20 @@ public class RequestTests extends ESTestCase { } private static void getAndExistsTest(Function requestConverter, String method) { - String index = randomAsciiOfLengthBetween(3, 10); - String type = randomAsciiOfLengthBetween(3, 10); - String id = randomAsciiOfLengthBetween(3, 10); + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); GetRequest getRequest = new GetRequest(index, type, id); Map expectedParams = new HashMap<>(); if (randomBoolean()) { if (randomBoolean()) { - String preference = randomAsciiOfLengthBetween(3, 10); + String preference = randomAlphaOfLengthBetween(3, 10); getRequest.preference(preference); expectedParams.put("preference", preference); } if (randomBoolean()) { - String routing = randomAsciiOfLengthBetween(3, 10); + String routing = randomAlphaOfLengthBetween(3, 10); getRequest.routing(routing); expectedParams.put("routing", routing); } @@ -166,7 +166,7 @@ 
public class RequestTests extends ESTestCase { String[] storedFields = new String[numStoredFields]; StringBuilder storedFieldsParam = new StringBuilder(); for (int i = 0; i < numStoredFields; i++) { - String storedField = randomAsciiOfLengthBetween(3, 10); + String storedField = randomAlphaOfLengthBetween(3, 10); storedFields[i] = storedField; storedFieldsParam.append(storedField); if (i < numStoredFields - 1) { @@ -188,11 +188,11 @@ public class RequestTests extends ESTestCase { } public void testIndex() throws IOException { - String index = randomAsciiOfLengthBetween(3, 10); - String type = randomAsciiOfLengthBetween(3, 10); + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); IndexRequest indexRequest = new IndexRequest(index, type); - String id = randomBoolean() ? randomAsciiOfLengthBetween(3, 10) : null; + String id = randomBoolean() ? randomAlphaOfLengthBetween(3, 10) : null; indexRequest.id(id); Map expectedParams = new HashMap<>(); @@ -219,17 +219,17 @@ public class RequestTests extends ESTestCase { if (frequently()) { if (randomBoolean()) { - String routing = randomAsciiOfLengthBetween(3, 10); + String routing = randomAlphaOfLengthBetween(3, 10); indexRequest.routing(routing); expectedParams.put("routing", routing); } if (randomBoolean()) { - String parent = randomAsciiOfLengthBetween(3, 10); + String parent = randomAlphaOfLengthBetween(3, 10); indexRequest.parent(parent); expectedParams.put("parent", parent); } if (randomBoolean()) { - String pipeline = randomAsciiOfLengthBetween(3, 10); + String pipeline = randomAlphaOfLengthBetween(3, 10); indexRequest.setPipeline(pipeline); expectedParams.put("pipeline", pipeline); } @@ -270,9 +270,9 @@ public class RequestTests extends ESTestCase { XContentType xContentType = randomFrom(XContentType.values()); Map expectedParams = new HashMap<>(); - String index = randomAsciiOfLengthBetween(3, 10); - String type = randomAsciiOfLengthBetween(3, 10); - String id = randomAsciiOfLengthBetween(3, 10); + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); UpdateRequest updateRequest = new UpdateRequest(index, type, id); updateRequest.detectNoop(randomBoolean()); @@ -295,12 +295,12 @@ public class RequestTests extends ESTestCase { updateRequest.upsert(new IndexRequest().source(source, xContentType)); } if (randomBoolean()) { - String routing = randomAsciiOfLengthBetween(3, 10); + String routing = randomAlphaOfLengthBetween(3, 10); updateRequest.routing(routing); expectedParams.put("routing", routing); } if (randomBoolean()) { - String parent = randomAsciiOfLengthBetween(3, 10); + String parent = randomAlphaOfLengthBetween(3, 10); updateRequest.parent(parent); expectedParams.put("parent", parent); } @@ -416,9 +416,9 @@ public class RequestTests extends ESTestCase { int nbItems = randomIntBetween(10, 100); for (int i = 0; i < nbItems; i++) { - String index = randomAsciiOfLength(5); - String type = randomAsciiOfLength(5); - String id = randomAsciiOfLength(5); + String index = randomAlphaOfLength(5); + String type = randomAlphaOfLength(5); + String id = randomAlphaOfLength(5); BytesReference source = RandomObjects.randomSource(random(), xContentType); DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); @@ -428,16 +428,16 @@ public class RequestTests extends ESTestCase { IndexRequest indexRequest = new IndexRequest(index, type, id).source(source, xContentType); docWriteRequest = 
indexRequest; if (randomBoolean()) { - indexRequest.setPipeline(randomAsciiOfLength(5)); + indexRequest.setPipeline(randomAlphaOfLength(5)); } if (randomBoolean()) { - indexRequest.parent(randomAsciiOfLength(5)); + indexRequest.parent(randomAlphaOfLength(5)); } } else if (opType == DocWriteRequest.OpType.CREATE) { IndexRequest createRequest = new IndexRequest(index, type, id).source(source, xContentType).create(true); docWriteRequest = createRequest; if (randomBoolean()) { - createRequest.parent(randomAsciiOfLength(5)); + createRequest.parent(randomAlphaOfLength(5)); } } else if (opType == DocWriteRequest.OpType.UPDATE) { final UpdateRequest updateRequest = new UpdateRequest(index, type, id).doc(new IndexRequest().source(source, xContentType)); @@ -449,14 +449,14 @@ public class RequestTests extends ESTestCase { randomizeFetchSourceContextParams(updateRequest::fetchSource, new HashMap<>()); } if (randomBoolean()) { - updateRequest.parent(randomAsciiOfLength(5)); + updateRequest.parent(randomAlphaOfLength(5)); } } else if (opType == DocWriteRequest.OpType.DELETE) { docWriteRequest = new DeleteRequest(index, type, id); } if (randomBoolean()) { - docWriteRequest.routing(randomAsciiOfLength(10)); + docWriteRequest.routing(randomAlphaOfLength(10)); } if (randomBoolean()) { docWriteRequest.version(randomNonNegativeLong()); @@ -591,7 +591,7 @@ public class RequestTests extends ESTestCase { Map expectedParams = new HashMap<>(); for (int i = 0; i < nbParams; i++) { String paramName = "p_" + i; - String paramValue = randomAsciiOfLength(5); + String paramValue = randomAlphaOfLength(5); params.putParam(paramName, paramValue); expectedParams.put(paramName, paramValue); } @@ -665,7 +665,7 @@ public class RequestTests extends ESTestCase { String[] includes = new String[numIncludes]; StringBuilder includesParam = new StringBuilder(); for (int i = 0; i < numIncludes; i++) { - String include = randomAsciiOfLengthBetween(3, 10); + String include = randomAlphaOfLengthBetween(3, 10); includes[i] = include; includesParam.append(include); if (i < numIncludes - 1) { @@ -679,7 +679,7 @@ public class RequestTests extends ESTestCase { String[] excludes = new String[numExcludes]; StringBuilder excludesParam = new StringBuilder(); for (int i = 0; i < numExcludes; i++) { - String exclude = randomAsciiOfLengthBetween(3, 10); + String exclude = randomAlphaOfLengthBetween(3, 10); excludes[i] = exclude; excludesParam.append(exclude); if (i < numExcludes - 1) { diff --git a/core/licenses/lucene-analyzers-common-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-analyzers-common-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 320a300a765..00000000000 --- a/core/licenses/lucene-analyzers-common-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ad2a7bd252cbdb76ac121287e670d75f4db2cd3 \ No newline at end of file diff --git a/core/licenses/lucene-analyzers-common-6.5.0.jar.sha1 b/core/licenses/lucene-analyzers-common-6.5.0.jar.sha1 new file mode 100644 index 00000000000..77d21a23774 --- /dev/null +++ b/core/licenses/lucene-analyzers-common-6.5.0.jar.sha1 @@ -0,0 +1 @@ +3989779b05ecd0ace6affe19223b1c27156604f1 \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-backward-codecs-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index c313a86e271..00000000000 --- a/core/licenses/lucene-backward-codecs-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c6a940eff8a87df40262b752ed7b135e448b7873 \ No newline 
at end of file diff --git a/core/licenses/lucene-backward-codecs-6.5.0.jar.sha1 b/core/licenses/lucene-backward-codecs-6.5.0.jar.sha1 new file mode 100644 index 00000000000..9eaff80ac08 --- /dev/null +++ b/core/licenses/lucene-backward-codecs-6.5.0.jar.sha1 @@ -0,0 +1 @@ +6a8660e7133f357ef40d9cac26316ccd9937a2eb \ No newline at end of file diff --git a/core/licenses/lucene-core-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-core-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 8e2ce3f31ac..00000000000 --- a/core/licenses/lucene-core-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6ef5ad88141760c00ea041da1535f3ffc364d67d \ No newline at end of file diff --git a/core/licenses/lucene-core-6.5.0.jar.sha1 b/core/licenses/lucene-core-6.5.0.jar.sha1 new file mode 100644 index 00000000000..7af91ec15bf --- /dev/null +++ b/core/licenses/lucene-core-6.5.0.jar.sha1 @@ -0,0 +1 @@ +ff176c9bde4228b43827849f5d2ff2e2717e3297 \ No newline at end of file diff --git a/core/licenses/lucene-grouping-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-grouping-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index b5e793e4d39..00000000000 --- a/core/licenses/lucene-grouping-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f15775571fb5762dfc92e00c3909cb8db8ff1d53 \ No newline at end of file diff --git a/core/licenses/lucene-grouping-6.5.0.jar.sha1 b/core/licenses/lucene-grouping-6.5.0.jar.sha1 new file mode 100644 index 00000000000..08ccc2cd086 --- /dev/null +++ b/core/licenses/lucene-grouping-6.5.0.jar.sha1 @@ -0,0 +1 @@ +10d2e5b36f460527ac9b948be0ec3077bde5b0ca \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-highlighter-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index c90084cc1cb..00000000000 --- a/core/licenses/lucene-highlighter-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -051d793aa64257beead4ccc7432eb5df81d17f23 \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-6.5.0.jar.sha1 b/core/licenses/lucene-highlighter-6.5.0.jar.sha1 new file mode 100644 index 00000000000..a8069723f16 --- /dev/null +++ b/core/licenses/lucene-highlighter-6.5.0.jar.sha1 @@ -0,0 +1 @@ +0019bb6a631ea0123e8e553b0510fa81c9d3c3eb \ No newline at end of file diff --git a/core/licenses/lucene-join-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-join-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index b6dfc376fbb..00000000000 --- a/core/licenses/lucene-join-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bc4cba55670c14ea812ff5de65edad4c312fdf6 \ No newline at end of file diff --git a/core/licenses/lucene-join-6.5.0.jar.sha1 b/core/licenses/lucene-join-6.5.0.jar.sha1 new file mode 100644 index 00000000000..cbad6199d76 --- /dev/null +++ b/core/licenses/lucene-join-6.5.0.jar.sha1 @@ -0,0 +1 @@ +dad85baba266793b9ceb80a9b08c4ee9838e09df \ No newline at end of file diff --git a/core/licenses/lucene-memory-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-memory-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index cfc1d044ca7..00000000000 --- a/core/licenses/lucene-memory-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -68cf08bcd8414a57493debf3a6a509d78a9abb56 \ No newline at end of file diff --git a/core/licenses/lucene-memory-6.5.0.jar.sha1 b/core/licenses/lucene-memory-6.5.0.jar.sha1 new file mode 100644 index 00000000000..5f22c0d9cfe --- /dev/null +++ b/core/licenses/lucene-memory-6.5.0.jar.sha1 @@ 
-0,0 +1 @@ +938f9f7efe8a403fd57c99aedd75d040d9caa896 \ No newline at end of file diff --git a/core/licenses/lucene-misc-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-misc-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 938b26b5a4d..00000000000 --- a/core/licenses/lucene-misc-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f5d90756dbeda1218d723b7bea0799c88d621adb \ No newline at end of file diff --git a/core/licenses/lucene-misc-6.5.0.jar.sha1 b/core/licenses/lucene-misc-6.5.0.jar.sha1 new file mode 100644 index 00000000000..2b405d7f16a --- /dev/null +++ b/core/licenses/lucene-misc-6.5.0.jar.sha1 @@ -0,0 +1 @@ +afdff39ecb30f6e2c6f056a5bdfcb13d928a25af \ No newline at end of file diff --git a/core/licenses/lucene-queries-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-queries-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 31dcaaaaabc..00000000000 --- a/core/licenses/lucene-queries-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9298e7d1ed96e7beb63d7ccdce1a4502eb0fe484 \ No newline at end of file diff --git a/core/licenses/lucene-queries-6.5.0.jar.sha1 b/core/licenses/lucene-queries-6.5.0.jar.sha1 new file mode 100644 index 00000000000..9a046ce204f --- /dev/null +++ b/core/licenses/lucene-queries-6.5.0.jar.sha1 @@ -0,0 +1 @@ +8e3971a008070712d57b59cf1f7b44c0d9d3df25 \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-queryparser-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 346d897a4cf..00000000000 --- a/core/licenses/lucene-queryparser-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -918de18963607af69dff38e4773c0bde89c73ae3 \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-6.5.0.jar.sha1 b/core/licenses/lucene-queryparser-6.5.0.jar.sha1 new file mode 100644 index 00000000000..3136885ab92 --- /dev/null +++ b/core/licenses/lucene-queryparser-6.5.0.jar.sha1 @@ -0,0 +1 @@ +225b904edf91ccdffffa398e1924ebadd5677c09 \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-sandbox-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index d7e3a49e9ee..00000000000 --- a/core/licenses/lucene-sandbox-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a311a7d9f3e9a8fbf3a367a4e2731f9d4579732b \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-6.5.0.jar.sha1 b/core/licenses/lucene-sandbox-6.5.0.jar.sha1 new file mode 100644 index 00000000000..e3787e336df --- /dev/null +++ b/core/licenses/lucene-sandbox-6.5.0.jar.sha1 @@ -0,0 +1 @@ +5c994fc5dc4f37133a861571211303d81c5d51ff \ No newline at end of file diff --git a/core/licenses/lucene-spatial-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-spatial-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 21062261226..00000000000 --- a/core/licenses/lucene-spatial-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -693bc4cb0e2e4465e0173c67ed0818071c4b460b \ No newline at end of file diff --git a/core/licenses/lucene-spatial-6.5.0.jar.sha1 b/core/licenses/lucene-spatial-6.5.0.jar.sha1 new file mode 100644 index 00000000000..cbadbfc42d7 --- /dev/null +++ b/core/licenses/lucene-spatial-6.5.0.jar.sha1 @@ -0,0 +1 @@ +553b7b13bef994f14076a85557df03cad67322e9 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-spatial-extras-6.5.0-snapshot-d00c5ca.jar.sha1 deleted 
file mode 100644 index 8eae8b0675d..00000000000 --- a/core/licenses/lucene-spatial-extras-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0326f31e63c76d476c23488c7354265cf915350f \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-6.5.0.jar.sha1 b/core/licenses/lucene-spatial-extras-6.5.0.jar.sha1 new file mode 100644 index 00000000000..f2ad71855f2 --- /dev/null +++ b/core/licenses/lucene-spatial-extras-6.5.0.jar.sha1 @@ -0,0 +1 @@ +73deae791d861820974600705ba06e9f801cbe56 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-spatial3d-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 8ce95f97f11..00000000000 --- a/core/licenses/lucene-spatial3d-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -69a3a86e9d045f872408793ea411d49e0c577268 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-6.5.0.jar.sha1 b/core/licenses/lucene-spatial3d-6.5.0.jar.sha1 new file mode 100644 index 00000000000..8fca9ac1ebc --- /dev/null +++ b/core/licenses/lucene-spatial3d-6.5.0.jar.sha1 @@ -0,0 +1 @@ +c2aad69500dac79338ef45f570cab47bec3d2724 \ No newline at end of file diff --git a/core/licenses/lucene-suggest-6.5.0-snapshot-d00c5ca.jar.sha1 b/core/licenses/lucene-suggest-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 2941229bbe0..00000000000 --- a/core/licenses/lucene-suggest-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fabc05ca175150171cf60370877276b933716bcd \ No newline at end of file diff --git a/core/licenses/lucene-suggest-6.5.0.jar.sha1 b/core/licenses/lucene-suggest-6.5.0.jar.sha1 new file mode 100644 index 00000000000..62764fbbc32 --- /dev/null +++ b/core/licenses/lucene-suggest-6.5.0.jar.sha1 @@ -0,0 +1 @@ +acf211f2bf901dfc8155a46c5a42c5650edf74ef \ No newline at end of file diff --git a/core/src/main/java/org/apache/lucene/index/OneMergeHelper.java b/core/src/main/java/org/apache/lucene/index/OneMergeHelper.java index 99ef7f4dd7f..f8b8c617822 100644 --- a/core/src/main/java/org/apache/lucene/index/OneMergeHelper.java +++ b/core/src/main/java/org/apache/lucene/index/OneMergeHelper.java @@ -19,6 +19,8 @@ package org.apache.lucene.index; +import java.io.IOException; + /** * Allows pkg private access */ @@ -27,4 +29,33 @@ public class OneMergeHelper { public static String getSegmentName(MergePolicy.OneMerge merge) { return merge.info != null ? merge.info.info.name : "_na_"; } + + /** + * The current MB per second rate limit for this merge. + **/ + public static double getMbPerSec(Thread thread, MergePolicy.OneMerge merge) { + if (thread instanceof ConcurrentMergeScheduler.MergeThread) { + return ((ConcurrentMergeScheduler.MergeThread) thread).rateLimiter.getMBPerSec(); + } + assert false: "this is not merge thread"; + return Double.POSITIVE_INFINITY; + } + + /** + * Returns total bytes written by this merge. + **/ + public static long getTotalBytesWritten(Thread thread, + MergePolicy.OneMerge merge) throws IOException { + /** + * TODO: The number of bytes written during the merge should be accessible in OneMerge. 
+ */ + if (thread instanceof ConcurrentMergeScheduler.MergeThread) { + return ((ConcurrentMergeScheduler.MergeThread) thread).rateLimiter + .getTotalBytesWritten(); + } + assert false: "this is not merge thread"; + return merge.totalBytesSize(); + } + + } diff --git a/core/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/core/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java index 169a89edbcf..b4d3c823439 100644 --- a/core/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java +++ b/core/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java @@ -26,7 +26,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.util.PriorityQueue; -import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -35,7 +34,7 @@ import java.util.Set; /** * Represents hits returned by {@link CollapsingTopDocsCollector#getTopDocs()}. */ -public class CollapseTopFieldDocs extends TopFieldDocs { +public final class CollapseTopFieldDocs extends TopFieldDocs { /** The field used for collapsing **/ public final String field; /** The collapse value for each top doc */ @@ -49,22 +48,59 @@ public class CollapseTopFieldDocs extends TopFieldDocs { } // Refers to one hit: - private static class ShardRef { + private static final class ShardRef { // Which shard (index into shardHits[]): final int shardIndex; + // True if we should use the incoming ScoreDoc.shardIndex for sort order + final boolean useScoreDocIndex; + // Which hit within the shard: int hitIndex; - ShardRef(int shardIndex) { + ShardRef(int shardIndex, boolean useScoreDocIndex) { this.shardIndex = shardIndex; + this.useScoreDocIndex = useScoreDocIndex; } @Override public String toString() { return "ShardRef(shardIndex=" + shardIndex + " hitIndex=" + hitIndex + ")"; } - }; + + int getShardIndex(ScoreDoc scoreDoc) { + if (useScoreDocIndex) { + if (scoreDoc.shardIndex == -1) { + throw new IllegalArgumentException("setShardIndex is false but TopDocs[" + + shardIndex + "].scoreDocs[" + hitIndex + "] is not set"); + } + return scoreDoc.shardIndex; + } else { + // NOTE: we don't assert that shardIndex is -1 here, because caller could in fact have set it but asked us to ignore it now + return shardIndex; + } + } + } + + /** + * if we need to tie-break since score / sort value are the same we first compare shard index (lower shard wins) + * and then iff shard index is the same we use the hit index. 
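The tie-break contract described just above boils down to a two-level comparison; a standalone sketch with the ShardRef plumbing stripped away (plain ints, not the shipped method):

```java
// Equal sort values fall back to shard index (lower shard wins); within the same
// shard the original hit order is preserved, so the merged order is stable.
static boolean tieBreakLessThan(int firstShardIndex, int firstHitIndex,
                                int secondShardIndex, int secondHitIndex) {
    if (firstShardIndex != secondShardIndex) {
        return firstShardIndex < secondShardIndex; // earlier shard wins
    }
    // Same shard: two distinct hits never share a hit index.
    assert firstHitIndex != secondHitIndex;
    return firstHitIndex < secondHitIndex;
}
```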
+ */ + static boolean tieBreakLessThan(ShardRef first, ScoreDoc firstDoc, ShardRef second, ScoreDoc secondDoc) { + final int firstShardIndex = first.getShardIndex(firstDoc); + final int secondShardIndex = second.getShardIndex(secondDoc); + // Tie break: earlier shard wins + if (firstShardIndex < secondShardIndex) { + return true; + } else if (firstShardIndex > secondShardIndex) { + return false; + } else { + // Tie break in same shard: resolve however the + // shard had resolved it: + assert first.hitIndex != second.hitIndex; + return first.hitIndex < second.hitIndex; + } + } private static class MergeSortQueue extends PriorityQueue { // These are really FieldDoc instances: @@ -72,7 +108,7 @@ public class CollapseTopFieldDocs extends TopFieldDocs { final FieldComparator[] comparators; final int[] reverseMul; - MergeSortQueue(Sort sort, CollapseTopFieldDocs[] shardHits) throws IOException { + MergeSortQueue(Sort sort, CollapseTopFieldDocs[] shardHits) { super(shardHits.length); this.shardHits = new ScoreDoc[shardHits.length][]; for (int shardIDX = 0; shardIDX < shardHits.length; shardIDX++) { @@ -115,18 +151,7 @@ public class CollapseTopFieldDocs extends TopFieldDocs { return cmp < 0; } } - - // Tie break: earlier shard wins - if (first.shardIndex < second.shardIndex) { - return true; - } else if (first.shardIndex > second.shardIndex) { - return false; - } else { - // Tie break in same shard: resolve however the - // shard had resolved it: - assert first.hitIndex != second.hitIndex; - return first.hitIndex < second.hitIndex; - } + return tieBreakLessThan(first, firstFD, second, secondFD); } } @@ -135,7 +160,7 @@ public class CollapseTopFieldDocs extends TopFieldDocs { * the provided CollapseTopDocs, sorting by score. Each {@link CollapseTopFieldDocs} instance must be sorted. 
**/ public static CollapseTopFieldDocs merge(Sort sort, int start, int size, - CollapseTopFieldDocs[] shardHits) throws IOException { + CollapseTopFieldDocs[] shardHits, boolean setShardIndex) { String collapseField = shardHits[0].field; for (int i = 1; i < shardHits.length; i++) { if (collapseField.equals(shardHits[i].field) == false) { @@ -155,7 +180,7 @@ public class CollapseTopFieldDocs extends TopFieldDocs { totalHitCount += shard.totalHits; if (shard.scoreDocs != null && shard.scoreDocs.length > 0) { availHitCount += shard.scoreDocs.length; - queue.add(new ShardRef(shardIDX)); + queue.add(new ShardRef(shardIDX, setShardIndex == false)); maxScore = Math.max(maxScore, shard.getMaxScore()); } } @@ -192,7 +217,9 @@ public class CollapseTopFieldDocs extends TopFieldDocs { continue; } seen.add(collapseValue); - hit.shardIndex = ref.shardIndex; + if (setShardIndex) { + hit.shardIndex = ref.shardIndex; + } if (hitUpto >= start) { hitList.add(hit); collapseList.add(collapseValue); diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 5d5a98ce3a9..b6ed2cb7a2b 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.replication.ReplicationOperation; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -712,7 +713,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte * in id order below. If you want to remove an exception leave a tombstone comment and mark the id as null in * ExceptionSerializationTests.testIds.ids. */ - enum ElasticsearchExceptionHandle { + private enum ElasticsearchExceptionHandle { INDEX_SHARD_SNAPSHOT_FAILED_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException.class, org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException::new, 0, UNKNOWN_VERSION_ADDED), DFS_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.dfs.DfsPhaseExecutionException.class, @@ -1006,6 +1007,30 @@ public class ElasticsearchException extends RuntimeException implements ToXConte } } + /** + * Returns an array of all registered handle IDs. These are the IDs for every registered + * exception. + * + * @return an array of all registered handle IDs + */ + static int[] ids() { + return Arrays.stream(ElasticsearchExceptionHandle.values()).mapToInt(h -> h.id).toArray(); + } + + /** + * Returns an array of all registered pairs of handle IDs and exception classes. These pairs are + * provided for every registered exception. 
+ * + * @return an array of all registered pairs of handle IDs and exception classes + */ + static Tuple<Integer, Class<? extends ElasticsearchException>>[] classes() { + @SuppressWarnings("unchecked") + final Tuple<Integer, Class<? extends ElasticsearchException>>[] ts = + Arrays.stream(ElasticsearchExceptionHandle.values()) + .map(h -> Tuple.tuple(h.id, h.exceptionClass)).toArray(Tuple[]::new); + return ts; + } + static { ID_TO_SUPPLIER = unmodifiableMap(Arrays .stream(ElasticsearchExceptionHandle.values()).collect(Collectors.toMap(e -> e.id, e -> e.constructor))); diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 23b889ea592..735043a3a28 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -116,6 +116,8 @@ public class Version implements Comparable<Version> { public static final Version V_5_2_3_UNRELEASED = new Version(V_5_2_3_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1); public static final int V_5_3_0_ID_UNRELEASED = 5030099; public static final Version V_5_3_0_UNRELEASED = new Version(V_5_3_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1); + public static final int V_5_3_1_ID_UNRELEASED = 5030199; + public static final Version V_5_3_1_UNRELEASED = new Version(V_5_3_1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_1); public static final int V_5_4_0_ID_UNRELEASED = 5040099; public static final Version V_5_4_0_UNRELEASED = new Version(V_5_4_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0); public static final int V_6_0_0_alpha1_ID_UNRELEASED = 6000001; @@ -138,6 +140,10 @@ public class Version implements Comparable<Version> { switch (id) { case V_6_0_0_alpha1_ID_UNRELEASED: return V_6_0_0_alpha1_UNRELEASED; + case V_5_4_0_ID_UNRELEASED: + return V_5_4_0_UNRELEASED; + case V_5_3_1_ID_UNRELEASED: + return V_5_3_1_UNRELEASED; case V_5_3_0_ID_UNRELEASED: return V_5_3_0_UNRELEASED; case V_5_2_3_ID_UNRELEASED: diff --git a/core/src/main/java/org/elasticsearch/action/ActionListener.java b/core/src/main/java/org/elasticsearch/action/ActionListener.java index f9fafa9f95a..e0d91a90364 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/core/src/main/java/org/elasticsearch/action/ActionListener.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.CheckedConsumer; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; /** diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 94db7d20308..c1d0541d4ce 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -149,6 +149,9 @@ import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.explain.ExplainAction; import org.elasticsearch.action.explain.TransportExplainAction; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; +import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; +import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesIndexAction; import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.fieldstats.TransportFieldStatsAction; import org.elasticsearch.action.get.GetAction; @@ -205,6 +208,7 @@ import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ActionPlugin.ActionHandler; import
org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.action.RestFieldCapabilitiesAction; import org.elasticsearch.rest.action.RestFieldStatsAction; import org.elasticsearch.rest.action.RestMainAction; import org.elasticsearch.rest.action.admin.cluster.RestCancelTasksAction; @@ -479,6 +483,8 @@ public class ActionModule extends AbstractModule { actions.register(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class); actions.register(FieldStatsAction.INSTANCE, TransportFieldStatsAction.class); + actions.register(FieldCapabilitiesAction.INSTANCE, TransportFieldCapabilitiesAction.class, + TransportFieldCapabilitiesIndexAction.class); actions.register(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class); actions.register(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class); @@ -587,6 +593,7 @@ public class ActionModule extends AbstractModule { registerHandler.accept(new RestDeleteStoredScriptAction(settings, restController)); registerHandler.accept(new RestFieldStatsAction(settings, restController)); + registerHandler.accept(new RestFieldCapabilitiesAction(settings, restController)); // Tasks API registerHandler.accept(new RestListTasksAction(settings, restController, nodesInCluster)); diff --git a/core/src/main/java/org/elasticsearch/action/NotifyOnceListener.java b/core/src/main/java/org/elasticsearch/action/NotifyOnceListener.java new file mode 100644 index 00000000000..1b717dcc6c0 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/NotifyOnceListener.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * A listener that ensures that only one of onResponse or onFailure is called. And the method + * that is called is only called once. Subclasses should implement notification logic with + * innerOnResponse and innerOnFailure.
+ */ +public abstract class NotifyOnceListener<Response> implements ActionListener<Response> { + + private final AtomicBoolean hasBeenCalled = new AtomicBoolean(false); + + protected abstract void innerOnResponse(Response response); + + protected abstract void innerOnFailure(Exception e); + + @Override + public final void onResponse(Response response) { + if (hasBeenCalled.compareAndSet(false, true)) { + innerOnResponse(response); + } + } + + @Override + public final void onFailure(Exception e) { + if (hasBeenCalled.compareAndSet(false, true)) { + innerOnFailure(e); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index 0d545ddfa70..27276b27dd9 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -25,6 +25,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -73,7 +74,8 @@ public class ClusterStatsNodes implements ToXContent { this.plugins.addAll(nodeResponse.nodeInfo().getPlugins().getPluginInfos()); // now do the stats that should be deduped by hardware (implemented by ip deduping) - TransportAddress publishAddress = nodeResponse.nodeInfo().getTransport().address().publishAddress(); + TransportAddress publishAddress = + nodeResponse.nodeInfo().getTransport().address().publishAddress(); final InetAddress inetAddress = publishAddress.address().getAddress(); if (!seenAddresses.add(inetAddress)) { continue; @@ -209,7 +211,8 @@ public class ClusterStatsNodes implements ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public XContentBuilder toXContent(XContentBuilder builder, Params params) + throws IOException { builder.field(Fields.TOTAL, total); for (Map.Entry entry : roles.entrySet()) { builder.field(entry.getKey(), entry.getValue()); @@ -280,7 +283,8 @@ public class ClusterStatsNodes implements ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public XContentBuilder toXContent(XContentBuilder builder, Params params) + throws IOException { builder.field(Fields.AVAILABLE_PROCESSORS, availableProcessors); builder.field(Fields.ALLOCATED_PROCESSORS, allocatedProcessors); builder.startArray(Fields.NAMES); @@ -326,7 +330,8 @@ public class ClusterStatsNodes implements ToXContent { // fd can be -1 if not supported on platform totalOpenFileDescriptors += fd; } - // we still do min max calc on -1, so we'll have an indication of it not being supported on one of the nodes. + // we still do min max calc on -1, so we'll have an indication + // of it not being supported on one of the nodes.
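A brief usage sketch for the NotifyOnceListener added above: the compareAndSet guard means a late failure (or a duplicate response) is silently dropped once the listener has fired. The listener body here is invented for illustration:

```java
NotifyOnceListener<String> once = new NotifyOnceListener<String>() {
    @Override
    protected void innerOnResponse(String response) {
        System.out.println("completed: " + response);
    }

    @Override
    protected void innerOnFailure(Exception e) {
        System.out.println("failed: " + e.getMessage());
    }
};

once.onResponse("ok");                        // prints "completed: ok"
once.onFailure(new RuntimeException("late")); // ignored: the AtomicBoolean already flipped
once.onResponse("again");                     // ignored for the same reason
```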
minOpenFileDescriptors = Math.min(minOpenFileDescriptors, fd); maxOpenFileDescriptors = Math.max(maxOpenFileDescriptors, fd); } @@ -375,7 +380,8 @@ public class ClusterStatsNodes implements ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public XContentBuilder toXContent(XContentBuilder builder, Params params) + throws IOException { builder.startObject(Fields.CPU).field(Fields.PERCENT, cpuPercent).endObject(); if (count > 0) { builder.startObject(Fields.OPEN_FILE_DESCRIPTORS); @@ -479,7 +485,8 @@ public class ClusterStatsNodes implements ToXContent { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public XContentBuilder toXContent(XContentBuilder builder, Params params) + throws IOException { builder.timeValueField(Fields.MAX_UPTIME_IN_MILLIS, Fields.MAX_UPTIME, maxUptime); builder.startArray(Fields.VERSIONS); for (ObjectIntCursor v : versions) { @@ -540,17 +547,25 @@ public class ClusterStatsNodes implements ToXContent { private final Map transportTypes; private final Map httpTypes; - private NetworkTypes(final List nodeInfos) { + NetworkTypes(final List nodeInfos) { final Map transportTypes = new HashMap<>(); final Map httpTypes = new HashMap<>(); for (final NodeInfo nodeInfo : nodeInfos) { final Settings settings = nodeInfo.getSettings(); final String transportType = - settings.get(NetworkModule.TRANSPORT_TYPE_KEY, NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.get(settings)); + settings.get(NetworkModule.TRANSPORT_TYPE_KEY, + NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.get(settings)); final String httpType = - settings.get(NetworkModule.HTTP_TYPE_KEY, NetworkModule.HTTP_DEFAULT_TYPE_SETTING.get(settings)); - transportTypes.computeIfAbsent(transportType, k -> new AtomicInteger()).incrementAndGet(); - httpTypes.computeIfAbsent(httpType, k -> new AtomicInteger()).incrementAndGet(); + settings.get(NetworkModule.HTTP_TYPE_KEY, + NetworkModule.HTTP_DEFAULT_TYPE_SETTING.get(settings)); + if (Strings.hasText(transportType)) { + transportTypes.computeIfAbsent(transportType, + k -> new AtomicInteger()).incrementAndGet(); + } + if (Strings.hasText(httpType)) { + httpTypes.computeIfAbsent(httpType, + k -> new AtomicInteger()).incrementAndGet(); + } } this.transportTypes = Collections.unmodifiableMap(transportTypes); this.httpTypes = Collections.unmodifiableMap(httpTypes); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/MappingUpdatePerformer.java b/core/src/main/java/org/elasticsearch/action/bulk/MappingUpdatePerformer.java index 27d636d3d93..e33c54d6dd5 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/MappingUpdatePerformer.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/MappingUpdatePerformer.java @@ -20,48 +20,28 @@ package org.elasticsearch.action.bulk; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.Nullable; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; import java.util.Objects; public interface MappingUpdatePerformer { - /** - * Determine if any mappings need to be updated, and update them on the - * master node if necessary. Returnes a failed {@code Engine.IndexResult} - * in the event updating the mappings fails or null if successful. 
- * Throws a {@code ReplicationOperation.RetryOnPrimaryException} if the - * operation needs to be retried on the primary due to the mappings not - * being present yet, or a different exception if updating the mappings - * on the master failed. - */ - @Nullable - MappingUpdateResult updateMappingsIfNeeded(IndexShard primary, IndexRequest request) throws Exception; /** - * Class encapsulating the resulting of potentially updating the mapping + * Determine if any mappings need to be updated, and update them on the master node if + * necessary. Throws an Exception in the event updating the mappings on the master fails. */ - class MappingUpdateResult { - @Nullable - public final Engine.Index operation; - @Nullable - public final Exception failure; + void updateMappingsIfNeeded(Engine.Index operation, + ShardId shardId, + String type) throws Exception; - MappingUpdateResult(Exception failure) { - Objects.requireNonNull(failure, "failure cannot be null"); - this.failure = failure; - this.operation = null; - } + /** + * Throws a {@code ReplicationOperation.RetryOnPrimaryException} if the operation needs to be + * retried on the primary due to the mappings not being present yet, or a different exception if + * updating the mappings on the master failed. + */ + void verifyMappings(Engine.Index operation, ShardId shardId) throws Exception; - MappingUpdateResult(Engine.Index operation) { - Objects.requireNonNull(operation, "operation cannot be null"); - this.operation = operation; - this.failure = null; - } - - public boolean isFailed() { - return failure != null; - } - } } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index d1bba8d2d4a..6a286c5a758 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -65,6 +65,9 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.index.translog.Translog.Location; +import org.elasticsearch.action.bulk.BulkItemResultHolder; +import org.elasticsearch.action.bulk.BulkItemResponse; import java.io.IOException; import java.util.Map; @@ -154,10 +157,23 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> { return new WriteReplicaResult<>(request, location, null, replica, logger); } - private static Translog.Location locationToSync(Translog.Location current, Translog.Location next) { - /* here we are moving forward in the translog with each operation. Under the hood - * this might cross translog files which is ok since from the user perspective - * the translog is like a tape where only the highest location needs to be fsynced - * in order to sync all previous locations even though they are not in the same file. - * When the translog rolls over files the previous file is fsynced on after closing if needed.*/ + private static Translog.Location locationToSync(Translog.Location current, + Translog.Location next) { + /* here we are moving forward in the translog with each operation.
Under the hood this might + * cross translog files which is ok since from the user perspective the translog is like a + * tape where only the highest location needs to be fsynced in order to sync all previous + * locations even though they are not in the same file. When the translog rolls over files + * the previous file is fsynced on after closing if needed.*/ assert next != null : "next operation can't be null"; - assert current == null || current.compareTo(next) < 0 : "translog locations are not increasing"; + assert current == null || current.compareTo(next) < 0 : + "translog locations are not increasing"; return next; } @@ -411,45 +429,82 @@ public class TransportShardBulkAction extends TransportWriteAction> results; + private final AtomicArray results; private final AtomicInteger counter; public ParentBulkByScrollTask(long id, String type, String action, String description, TaskId parentTaskId, int slices) { @@ -82,13 +82,11 @@ public class ParentBulkByScrollTask extends BulkByScrollTask { } private void addResultsToList(List sliceStatuses) { - for (AtomicArray.Entry> t : results.asList()) { - if (t.value != null) { - if (t.value.v1() != null) { - sliceStatuses.set(t.index, new StatusOrException(t.value.v1().getStatus())); - } else { - sliceStatuses.set(t.index, new StatusOrException(t.value.v2())); - } + for (Result t : results.asList()) { + if (t.response != null) { + sliceStatuses.set(t.sliceId, new StatusOrException(t.response.getStatus())); + } else { + sliceStatuses.set(t.sliceId, new StatusOrException(t.failure)); } } } @@ -97,7 +95,7 @@ public class ParentBulkByScrollTask extends BulkByScrollTask { * Record a response from a slice and respond to the listener if the request is finished. */ public void onSliceResponse(ActionListener listener, int sliceId, BulkByScrollResponse response) { - results.setOnce(sliceId, new Tuple<>(response, null)); + results.setOnce(sliceId, new Result(sliceId, response)); /* If the request isn't finished we could automatically rethrottle the sub-requests here but we would only want to do that if we * were fairly sure they had a while left to go. */ recordSliceCompletionAndRespondIfAllDone(listener); @@ -107,7 +105,7 @@ public class ParentBulkByScrollTask extends BulkByScrollTask { * Record a failure from a slice and respond to the listener if the request is finished. */ void onSliceFailure(ActionListener listener, int sliceId, Exception e) { - results.setOnce(sliceId, new Tuple<>(null, e)); + results.setOnce(sliceId, new Result(sliceId, e)); recordSliceCompletionAndRespondIfAllDone(listener); // TODO cancel when a slice fails? 
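The Tuple-to-Result change above (the Result class itself appears just below) trades an anonymous pair for a holder that carries its slice id and enforces that exactly one of response/failure is set. A minimal standalone sketch of the same pattern, with a hypothetical type name:

```java
// One slice outcome: exactly one of `response` and `failure` is non-null, and the
// slice id travels with the outcome instead of living in a parallel index.
final class SliceResult<R> {
    final int sliceId;
    final R response;        // non-null on success
    final Exception failure; // non-null on failure

    SliceResult(int sliceId, R response) {
        this.sliceId = sliceId;
        this.response = java.util.Objects.requireNonNull(response);
        this.failure = null;
    }

    SliceResult(int sliceId, Exception failure) {
        this.sliceId = sliceId;
        this.response = null;
        this.failure = java.util.Objects.requireNonNull(failure);
    }
}
```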
} @@ -118,17 +116,17 @@ public class ParentBulkByScrollTask extends BulkByScrollTask { } List responses = new ArrayList<>(results.length()); Exception exception = null; - for (AtomicArray.Entry> t : results.asList()) { - if (t.value.v1() == null) { - assert t.value.v2() != null : "exception shouldn't be null if value is null"; + for (Result t : results.asList()) { + if (t.response == null) { + assert t.failure != null : "exception shouldn't be null if value is null"; if (exception == null) { - exception = t.value.v2(); + exception = t.failure; } else { - exception.addSuppressed(t.value.v2()); + exception.addSuppressed(t.failure); } } else { - assert t.value.v2() == null : "exception should be null if response is not null"; - responses.add(t.value.v1()); + assert t.failure == null : "exception should be null if response is not null"; + responses.add(t.response); } } if (exception == null) { @@ -138,4 +136,21 @@ public class ParentBulkByScrollTask extends BulkByScrollTask { } } + private static final class Result { + final BulkByScrollResponse response; + final int sliceId; + final Exception failure; + + private Result(int sliceId, BulkByScrollResponse response) { + this.sliceId = sliceId; + this.response = response; + failure = null; + } + + private Result(int sliceId, Exception failure) { + this.sliceId = sliceId; + this.failure = failure; + response = null; + } + } } diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java new file mode 100644 index 00000000000..ef7513f38ab --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -0,0 +1,282 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collections; +import java.util.Arrays; +import java.util.List; +import java.util.ArrayList; +import java.util.Comparator; + +/** + * Describes the capabilities of a field optionally merged across multiple indices. + */ +public class FieldCapabilities implements Writeable, ToXContent { + private final String name; + private final String type; + private final boolean isSearchable; + private final boolean isAggregatable; + + private final String[] indices; + private final String[] nonSearchableIndices; + private final String[] nonAggregatableIndices; + + /** + * Constructor + * @param name The name of the field. 
+     * @param type The type associated with the field.
+     * @param isSearchable Whether this field is indexed for search.
+     * @param isAggregatable Whether this field can be aggregated on.
+     */
+    FieldCapabilities(String name, String type, boolean isSearchable, boolean isAggregatable) {
+        this(name, type, isSearchable, isAggregatable, null, null, null);
+    }
+
+    /**
+     * Constructor
+     * @param name The name of the field.
+     * @param type The type associated with the field.
+     * @param isSearchable Whether this field is indexed for search.
+     * @param isAggregatable Whether this field can be aggregated on.
+     * @param indices The list of indices where this field name is defined as {@code type},
+     *                or null if all indices have the same {@code type} for the field.
+     * @param nonSearchableIndices The list of indices where this field is not searchable,
+     *                             or null if the field is searchable in all indices.
+     * @param nonAggregatableIndices The list of indices where this field is not aggregatable,
+     *                               or null if the field is aggregatable in all indices.
+     */
+    FieldCapabilities(String name, String type,
+                      boolean isSearchable, boolean isAggregatable,
+                      String[] indices,
+                      String[] nonSearchableIndices,
+                      String[] nonAggregatableIndices) {
+        this.name = name;
+        this.type = type;
+        this.isSearchable = isSearchable;
+        this.isAggregatable = isAggregatable;
+        this.indices = indices;
+        this.nonSearchableIndices = nonSearchableIndices;
+        this.nonAggregatableIndices = nonAggregatableIndices;
+    }
+
+    FieldCapabilities(StreamInput in) throws IOException {
+        this.name = in.readString();
+        this.type = in.readString();
+        this.isSearchable = in.readBoolean();
+        this.isAggregatable = in.readBoolean();
+        this.indices = in.readOptionalStringArray();
+        this.nonSearchableIndices = in.readOptionalStringArray();
+        this.nonAggregatableIndices = in.readOptionalStringArray();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(name);
+        out.writeString(type);
+        out.writeBoolean(isSearchable);
+        out.writeBoolean(isAggregatable);
+        out.writeOptionalStringArray(indices);
+        out.writeOptionalStringArray(nonSearchableIndices);
+        out.writeOptionalStringArray(nonAggregatableIndices);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("type", type);
+        builder.field("searchable", isSearchable);
+        builder.field("aggregatable", isAggregatable);
+        if (indices != null) {
+            builder.field("indices", indices);
+        }
+        if (nonSearchableIndices != null) {
+            builder.field("non_searchable_indices", nonSearchableIndices);
+        }
+        if (nonAggregatableIndices != null) {
+            builder.field("non_aggregatable_indices", nonAggregatableIndices);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    /**
+     * The name of the field.
+     */
+    public String getName() {
+        return name;
+    }
+
+    /**
+     * Whether this field can be aggregated on all indices.
+     */
+    public boolean isAggregatable() {
+        return isAggregatable;
+    }
+
+    /**
+     * Whether this field is indexed for search on all indices.
+     */
+    public boolean isSearchable() {
+        return isSearchable;
+    }
+
+    /**
+     * The type of the field.
+     */
+    public String getType() {
+        return type;
+    }
+
+    /**
+     * The list of indices where this field name is defined as {@code type},
+     * or null if all indices have the same {@code type} for the field.
+     */
+    public String[] indices() {
+        return indices;
+    }
+
+    /**
+     * The list of indices where this field is not searchable,
+     * or null if the field is searchable in all indices.
+     */
+    public String[] nonSearchableIndices() {
+        return nonSearchableIndices;
+    }
+
+    /**
+     * The list of indices where this field is not aggregatable,
+     * or null if the field is aggregatable in all indices.
+     */
+    public String[] nonAggregatableIndices() {
+        return nonAggregatableIndices;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        FieldCapabilities that = (FieldCapabilities) o;
+
+        if (isSearchable != that.isSearchable) return false;
+        if (isAggregatable != that.isAggregatable) return false;
+        if (!name.equals(that.name)) return false;
+        if (!type.equals(that.type)) return false;
+        if (!Arrays.equals(indices, that.indices)) return false;
+        if (!Arrays.equals(nonSearchableIndices, that.nonSearchableIndices)) return false;
+        return Arrays.equals(nonAggregatableIndices, that.nonAggregatableIndices);
+    }
+
+    @Override
+    public int hashCode() {
+        int result = name.hashCode();
+        result = 31 * result + type.hashCode();
+        result = 31 * result + (isSearchable ? 1 : 0);
+        result = 31 * result + (isAggregatable ? 1 : 0);
+        result = 31 * result + Arrays.hashCode(indices);
+        result = 31 * result + Arrays.hashCode(nonSearchableIndices);
+        result = 31 * result + Arrays.hashCode(nonAggregatableIndices);
+        return result;
+    }
+
+    static class Builder {
+        private String name;
+        private String type;
+        private boolean isSearchable;
+        private boolean isAggregatable;
+        private List<IndexCaps> indiceList;
+
+        Builder(String name, String type) {
+            this.name = name;
+            this.type = type;
+            this.isSearchable = true;
+            this.isAggregatable = true;
+            this.indiceList = new ArrayList<>();
+        }
+
+        void add(String index, boolean search, boolean agg) {
+            IndexCaps indexCaps = new IndexCaps(index, search, agg);
+            indiceList.add(indexCaps);
+            this.isSearchable &= search;
+            this.isAggregatable &= agg;
+        }
+
+        FieldCapabilities build(boolean withIndices) {
+            final String[] indices;
+            /* Eclipse can't deal with o -> o.name, maybe because of
+             * https://bugs.eclipse.org/bugs/show_bug.cgi?id=511750 */
+            Collections.sort(indiceList, Comparator.comparing((IndexCaps o) -> o.name));
+            if (withIndices) {
+                indices = indiceList.stream()
+                    .map(caps -> caps.name)
+                    .toArray(String[]::new);
+            } else {
+                indices = null;
+            }
+
+            final String[] nonSearchableIndices;
+            if (isSearchable == false &&
+                indiceList.stream().anyMatch((caps) -> caps.isSearchable)) {
+                // Iff this field is searchable in some indices AND non-searchable in others
+                // we record the list of non-searchable indices
+                nonSearchableIndices = indiceList.stream()
+                    .filter((caps) -> caps.isSearchable == false)
+                    .map(caps -> caps.name)
+                    .toArray(String[]::new);
+            } else {
+                nonSearchableIndices = null;
+            }
+
+            final String[] nonAggregatableIndices;
+            if (isAggregatable == false &&
+                indiceList.stream().anyMatch((caps) -> caps.isAggregatable)) {
+                // Iff this field is aggregatable in some indices AND non-aggregatable in others
+                // we record the list of non-aggregatable indices
+                nonAggregatableIndices = indiceList.stream()
+                    .filter((caps) -> caps.isAggregatable == false)
+                    .map(caps -> caps.name)
+                    .toArray(String[]::new);
+            } else {
+                nonAggregatableIndices = null;
+            }
+            return new FieldCapabilities(name, type, isSearchable, isAggregatable,
+                indices, nonSearchableIndices, nonAggregatableIndices);
+        }
+    }
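To make the Builder's merge semantics concrete, here is a hypothetical usage sketch. It assumes code running inside the org.elasticsearch.action.fieldcaps package, since the Builder and its add method are package-private, and all index names are invented.

```java
// Fold per-index capabilities for a "title" field mapped as "text" in two indices.
FieldCapabilities.Builder builder = new FieldCapabilities.Builder("title", "text");
builder.add("logs-2017-01", true, false); // searchable, not aggregatable
builder.add("logs-2017-02", true, true);  // searchable and aggregatable

// withIndices is true only when the field resolves to more than one type across indices.
FieldCapabilities merged = builder.build(false);

assert merged.isSearchable();                       // true AND true
assert merged.isAggregatable() == false;            // true AND false
assert merged.nonSearchableIndices() == null;       // searchable everywhere, so no list
assert merged.nonAggregatableIndices().length == 1; // only "logs-2017-01" is recorded
```

The capability flags are AND-ed across indices, while the per-capability index lists are materialized only when the capability is mixed, which keeps the response compact in the common homogeneous case.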
+ + private static class IndexCaps { + final String name; + final boolean isSearchable; + final boolean isAggregatable; + + IndexCaps(String name, boolean isSearchable, boolean isAggregatable) { + this.name = name; + this.isSearchable = isSearchable; + this.isAggregatable = isAggregatable; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java new file mode 100644 index 00000000000..93d67f3fc3c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; + +public class FieldCapabilitiesAction extends Action { + + public static final FieldCapabilitiesAction INSTANCE = new FieldCapabilitiesAction(); + public static final String NAME = "indices:data/read/field_caps"; + + private FieldCapabilitiesAction() { + super(NAME); + } + + @Override + public FieldCapabilitiesResponse newResponse() { + return new FieldCapabilitiesResponse(); + } + + @Override + public FieldCapabilitiesRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new FieldCapabilitiesRequestBuilder(client, this); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexRequest.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexRequest.java new file mode 100644 index 00000000000..460a21ae866 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexRequest.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class FieldCapabilitiesIndexRequest + extends SingleShardRequest { + + private String[] fields; + + // For serialization + FieldCapabilitiesIndexRequest() {} + + FieldCapabilitiesIndexRequest(String[] fields, String index) { + super(index); + if (fields == null || fields.length == 0) { + throw new IllegalArgumentException("specified fields can't be null or empty"); + } + this.fields = fields; + } + + public String[] fields() { + return fields; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + fields = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(fields); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java new file mode 100644 index 00000000000..de520ee6274 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Map; + +/** + * Response for {@link FieldCapabilitiesIndexRequest} requests. 
+ */
+public class FieldCapabilitiesIndexResponse extends ActionResponse {
+    private String indexName;
+    private Map<String, FieldCapabilities> responseMap;
+
+    FieldCapabilitiesIndexResponse(String indexName, Map<String, FieldCapabilities> responseMap) {
+        this.indexName = indexName;
+        this.responseMap = responseMap;
+    }
+
+    FieldCapabilitiesIndexResponse() {
+    }
+
+    /**
+     * Get the index name.
+     */
+    public String getIndexName() {
+        return indexName;
+    }
+
+    /**
+     * Get the field capabilities map.
+     */
+    public Map<String, FieldCapabilities> get() {
+        return responseMap;
+    }
+
+    /**
+     * Get the field capabilities for the provided {@code field}.
+     */
+    public FieldCapabilities getField(String field) {
+        return responseMap.get(field);
+    }
+
+    @Override
+    public void readFrom(StreamInput in) throws IOException {
+        super.readFrom(in);
+        this.indexName = in.readString();
+        this.responseMap =
+            in.readMap(StreamInput::readString, FieldCapabilities::new);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(indexName);
+        out.writeMap(responseMap,
+            StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut));
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        FieldCapabilitiesIndexResponse that = (FieldCapabilitiesIndexResponse) o;
+
+        return responseMap.equals(that.responseMap);
+    }
+
+    @Override
+    public int hashCode() {
+        return responseMap.hashCode();
+    }
+}
diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java
new file mode 100644
index 00000000000..7eab9112162
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
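Correctness of the index response's wire format depends on readFrom and writeTo staying exactly symmetric: same field order, same key and value codecs. The sketch below isolates that read/write pairing; Example is a hypothetical class and, like the response itself, it would need to live in the same package to reach the package-private FieldCapabilities(StreamInput) constructor.

```java
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Map;

final class Example implements Writeable {
    private final Map<String, FieldCapabilities> byField;

    Example(Map<String, FieldCapabilities> byField) {
        this.byField = byField;
    }

    Example(StreamInput in) throws IOException {
        // Must mirror writeTo exactly: string keys, then FieldCapabilities values.
        this.byField = in.readMap(StreamInput::readString, FieldCapabilities::new);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeMap(byField, StreamOutput::writeString, (o, fc) -> fc.writeTo(o));
    }
}
```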
+ */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.common.xcontent.ObjectParser.fromList; + +public class FieldCapabilitiesRequest extends ActionRequest + implements IndicesRequest.Replaceable { + public static final ParseField FIELDS_FIELD = new ParseField("fields"); + public static final String NAME = "field_caps_request"; + private String[] indices = Strings.EMPTY_ARRAY; + private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen(); + private String[] fields = Strings.EMPTY_ARRAY; + + private static ObjectParser PARSER = + new ObjectParser<>(NAME, FieldCapabilitiesRequest::new); + + static { + PARSER.declareStringArray(fromList(String.class, FieldCapabilitiesRequest::fields), + FIELDS_FIELD); + } + + public FieldCapabilitiesRequest() {} + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + fields = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(fields); + } + + public static FieldCapabilitiesRequest parseFields(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + /** + * The list of field names to retrieve + */ + public FieldCapabilitiesRequest fields(String... fields) { + if (fields == null || fields.length == 0) { + throw new IllegalArgumentException("specified fields can't be null or empty"); + } + Set fieldSet = new HashSet<>(Arrays.asList(fields)); + this.fields = fieldSet.toArray(new String[0]); + return this; + } + + public String[] fields() { + return fields; + } + + /** + * + * The list of indices to lookup + */ + public FieldCapabilitiesRequest indices(String... 
indices) { + this.indices = indices; + return this; + } + + public FieldCapabilitiesRequest indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (fields == null || fields.length == 0) { + validationException = + ValidateActions.addValidationError("no fields specified", validationException); + } + return validationException; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + FieldCapabilitiesRequest that = (FieldCapabilitiesRequest) o; + + if (!Arrays.equals(indices, that.indices)) return false; + if (!indicesOptions.equals(that.indicesOptions)) return false; + return Arrays.equals(fields, that.fields); + } + + @Override + public int hashCode() { + int result = Arrays.hashCode(indices); + result = 31 * result + indicesOptions.hashCode(); + result = 31 * result + Arrays.hashCode(fields); + return result; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResultProvider.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java similarity index 51% rename from core/src/main/java/org/elasticsearch/search/query/QuerySearchResultProvider.java rename to core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java index 852a97e5248..742d5b3ee32 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResultProvider.java +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java @@ -17,25 +17,25 @@ * under the License. */ -package org.elasticsearch.search.query; +package org.elasticsearch.action.fieldcaps; -import org.elasticsearch.search.SearchPhaseResult; -import org.elasticsearch.search.fetch.FetchSearchResult; -import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; -public abstract class QuerySearchResultProvider extends TransportResponse implements SearchPhaseResult { - - /** - * Returns the query result iff it's included in this response otherwise null - */ - public QuerySearchResult queryResult() { - return null; +public class FieldCapabilitiesRequestBuilder extends + ActionRequestBuilder { + public FieldCapabilitiesRequestBuilder(ElasticsearchClient client, + FieldCapabilitiesAction action, + String... indices) { + super(client, action, new FieldCapabilitiesRequest().indices(indices)); } /** - * Returns the fetch result iff it's included in this response otherwise null + * The list of field names to retrieve. */ - public FetchSearchResult fetchResult() { - return null; + public FieldCapabilitiesRequestBuilder setFields(String... 
fields) { + request().fields(fields); + return this; } } diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java new file mode 100644 index 00000000000..9ff2cf3850b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java @@ -0,0 +1,106 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + +/** + * Response for {@link FieldCapabilitiesRequest} requests. + */ +public class FieldCapabilitiesResponse extends ActionResponse implements ToXContent { + private Map> responseMap; + + FieldCapabilitiesResponse(Map> responseMap) { + this.responseMap = responseMap; + } + + /** + * Used for serialization + */ + FieldCapabilitiesResponse() { + this.responseMap = Collections.emptyMap(); + } + + /** + * Get the field capabilities map. + */ + public Map> get() { + return responseMap; + } + + /** + * + * Get the field capabilities per type for the provided {@code field}. 
+ */ + public Map getField(String field) { + return responseMap.get(field); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.responseMap = + in.readMap(StreamInput::readString, FieldCapabilitiesResponse::readField); + } + + private static Map readField(StreamInput in) throws IOException { + return in.readMap(StreamInput::readString, FieldCapabilities::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(responseMap, StreamOutput::writeString, FieldCapabilitiesResponse::writeField); + } + + private static void writeField(StreamOutput out, + Map map) throws IOException { + out.writeMap(map, StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("fields", responseMap); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + FieldCapabilitiesResponse that = (FieldCapabilitiesResponse) o; + + return responseMap.equals(that.responseMap); + } + + @Override + public int hashCode() { + return responseMap.hashCode(); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java new file mode 100644 index 00000000000..a7f268eaf5d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
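Because the merged response is keyed first by field name and then by type, a caller can distinguish a missing field, a consistently mapped field, and a field with conflicting mappings. A hedged inspection fragment (not a complete class), with response assumed to be in scope and all field and index names invented:

```java
Map<String, FieldCapabilities> typesForTitle = response.getField("title");
if (typesForTitle == null) {
    // the field does not exist in any targeted index
} else if (typesForTitle.size() > 1) {
    // conflicting mappings: each per-type entry lists the indices that use that type
    typesForTitle.forEach((type, caps) ->
        System.out.println(type + " in " + java.util.Arrays.toString(caps.indices())));
} else {
    FieldCapabilities caps = typesForTitle.values().iterator().next();
    System.out.println("searchable in every index: " + caps.isSearchable());
}
```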
+ */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReferenceArray; + +public class TransportFieldCapabilitiesAction + extends HandledTransportAction { + private final ClusterService clusterService; + private final TransportFieldCapabilitiesIndexAction shardAction; + + @Inject + public TransportFieldCapabilitiesAction(Settings settings, TransportService transportService, + ClusterService clusterService, ThreadPool threadPool, + TransportFieldCapabilitiesIndexAction shardAction, + ActionFilters actionFilters, + IndexNameExpressionResolver + indexNameExpressionResolver) { + super(settings, FieldCapabilitiesAction.NAME, threadPool, transportService, + actionFilters, indexNameExpressionResolver, FieldCapabilitiesRequest::new); + this.clusterService = clusterService; + this.shardAction = shardAction; + } + + @Override + protected void doExecute(FieldCapabilitiesRequest request, + final ActionListener listener) { + ClusterState clusterState = clusterService.state(); + String[] concreteIndices = + indexNameExpressionResolver.concreteIndexNames(clusterState, request); + final AtomicInteger indexCounter = new AtomicInteger(); + final AtomicInteger completionCounter = new AtomicInteger(concreteIndices.length); + final AtomicReferenceArray indexResponses = + new AtomicReferenceArray<>(concreteIndices.length); + if (concreteIndices.length == 0) { + listener.onResponse(new FieldCapabilitiesResponse()); + } else { + for (String index : concreteIndices) { + FieldCapabilitiesIndexRequest indexRequest = + new FieldCapabilitiesIndexRequest(request.fields(), index); + shardAction.execute(indexRequest, + new ActionListener () { + @Override + public void onResponse(FieldCapabilitiesIndexResponse result) { + indexResponses.set(indexCounter.getAndIncrement(), result); + if (completionCounter.decrementAndGet() == 0) { + listener.onResponse(merge(indexResponses)); + } + } + + @Override + public void onFailure(Exception e) { + indexResponses.set(indexCounter.getAndIncrement(), e); + if (completionCounter.decrementAndGet() == 0) { + listener.onResponse(merge(indexResponses)); + } + } + }); + } + } + } + + private FieldCapabilitiesResponse merge(AtomicReferenceArray indexResponses) { + Map> responseMapBuilder = new HashMap<> (); + for (int i = 0; i < indexResponses.length(); i++) { + Object element = indexResponses.get(i); + if (element instanceof FieldCapabilitiesIndexResponse == false) { + assert element instanceof Exception; + continue; + } + FieldCapabilitiesIndexResponse response = (FieldCapabilitiesIndexResponse) element; + for (String field : response.get().keySet()) { + Map typeMap = responseMapBuilder.get(field); + if (typeMap == null) { + typeMap = new HashMap<> (); + responseMapBuilder.put(field, typeMap); + } + FieldCapabilities fieldCap = response.getField(field); + FieldCapabilities.Builder builder = 
typeMap.get(fieldCap.getType()); + if (builder == null) { + builder = new FieldCapabilities.Builder(field, fieldCap.getType()); + typeMap.put(fieldCap.getType(), builder); + } + builder.add(response.getIndexName(), + fieldCap.isSearchable(), fieldCap.isAggregatable()); + } + } + + Map> responseMap = new HashMap<>(); + for (Map.Entry> entry : + responseMapBuilder.entrySet()) { + Map typeMap = new HashMap<>(); + boolean multiTypes = entry.getValue().size() > 1; + for (Map.Entry fieldEntry : + entry.getValue().entrySet()) { + typeMap.put(fieldEntry.getKey(), fieldEntry.getValue().build(multiTypes)); + } + responseMap.put(entry.getKey(), typeMap); + } + + return new FieldCapabilitiesResponse(responseMap); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java new file mode 100644 index 00000000000..5bab7276860 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
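The doExecute fan-out above stores one slot per concrete index and decrements an atomic counter as callbacks arrive, so responses and failures can land on any thread in any order and the merge runs exactly once. Below is a stripped-down model of just that counting discipline; it is a sketch of the pattern, not the transport action itself.

```java
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;

final class CompletionCounterSketch {
    private final AtomicReferenceArray<Object> slots; // a response or an Exception per index
    private final AtomicInteger remaining;
    private final Runnable onAllDone;

    CompletionCounterSketch(int numIndices, Runnable onAllDone) {
        this.slots = new AtomicReferenceArray<>(numIndices);
        this.remaining = new AtomicInteger(numIndices);
        this.onAllDone = onAllDone;
    }

    void onResult(int slot, Object responseOrException) {
        slots.set(slot, responseOrException);
        // The write above happens-before the final decrement, so whichever caller
        // reaches zero observes every slot; failures simply stay in the array and
        // are skipped by the merge step.
        if (remaining.decrementAndGet() == 0) {
            onAllDone.run();
        }
    }

    Object get(int slot) {
        return slots.get(slot);
    }
}
```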
+ */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +public class TransportFieldCapabilitiesIndexAction + extends TransportSingleShardAction { + + private static final String ACTION_NAME = FieldCapabilitiesAction.NAME + "[index]"; + + protected final ClusterService clusterService; + private final IndicesService indicesService; + + @Inject + public TransportFieldCapabilitiesIndexAction(Settings settings, + ClusterService clusterService, + TransportService transportService, + IndicesService indicesService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver + indexNameExpressionResolver) { + super(settings, + ACTION_NAME, + threadPool, + clusterService, + transportService, + actionFilters, + indexNameExpressionResolver, + FieldCapabilitiesIndexRequest::new, + ThreadPool.Names.MANAGEMENT); + this.clusterService = clusterService; + this.indicesService = indicesService; + } + + @Override + protected boolean resolveIndex(FieldCapabilitiesIndexRequest request) { + //internal action, index already resolved + return false; + } + + @Override + protected ShardsIterator shards(ClusterState state, InternalRequest request) { + // Will balance requests between shards + // Resolve patterns and deduplicate + return state.routingTable().index(request.concreteIndex()).randomAllActiveShardsIt(); + } + + @Override + protected FieldCapabilitiesIndexResponse shardOperation( + final FieldCapabilitiesIndexRequest request, + ShardId shardId) { + MapperService mapperService = + indicesService.indexServiceSafe(shardId.getIndex()).mapperService(); + Set fieldNames = new HashSet<>(); + for (String field : request.fields()) { + fieldNames.addAll(mapperService.simpleMatchToIndexNames(field)); + } + Map responseMap = new HashMap<>(); + for (String field : fieldNames) { + MappedFieldType ft = mapperService.fullName(field); + FieldCapabilities fieldCap = new FieldCapabilities(field, + ft.typeName(), + ft.isSearchable(), + ft.isAggregatable()); + responseMap.put(field, fieldCap); + } + return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), responseMap); + } + + @Override + protected FieldCapabilitiesIndexResponse newResponse() { + return new FieldCapabilitiesIndexResponse(); + } + + @Override + protected ClusterBlockException checkRequestBlock(ClusterState state, + InternalRequest request) { + return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA_READ, + request.concreteIndex()); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java 
b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index d4627391b11..20a619cec2c 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -44,6 +44,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Locale; public class MultiGetRequest extends ActionRequest implements Iterable, CompositeIndicesRequest, RealtimeRequest { @@ -319,6 +320,14 @@ public class MultiGetRequest extends ActionRequest implements Iterable exten } else { if (logger.isTraceEnabled()) { final String resultsFrom = results.getSuccessfulResults() - .map(r -> r.shardTarget().toString()).collect(Collectors.joining(",")); + .map(r -> r.getSearchShardTarget().toString()).collect(Collectors.joining(",")); logger.trace("[{}] Moving to next phase: [{}], based on results from: {} (cluster state version: {})", currentPhase.getName(), nextPhase.getName(), resultsFrom, clusterStateVersion); } @@ -159,10 +159,10 @@ abstract class AbstractSearchAsyncAction exten if (shardFailures == null) { return ShardSearchFailure.EMPTY_ARRAY; } - List> entries = shardFailures.asList(); + List entries = shardFailures.asList(); ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()]; for (int i = 0; i < failures.length; i++) { - failures[i] = entries.get(i).value; + failures[i] = entries.get(i); } return failures; } @@ -209,8 +209,8 @@ abstract class AbstractSearchAsyncAction exten private void raisePhaseFailure(SearchPhaseExecutionException exception) { results.getSuccessfulResults().forEach((entry) -> { try { - Transport.Connection connection = nodeIdToConnection.apply(entry.shardTarget().getNodeId()); - sendReleaseSearchContext(entry.id(), connection); + Transport.Connection connection = nodeIdToConnection.apply(entry.getSearchShardTarget().getNodeId()); + sendReleaseSearchContext(entry.getRequestId(), connection); } catch (Exception inner) { inner.addSuppressed(exception); logger.trace("failed to release context", inner); @@ -220,18 +220,18 @@ abstract class AbstractSearchAsyncAction exten } @Override - public final void onShardSuccess(int shardIndex, Result result) { + public final void onShardSuccess(Result result) { successfulOps.incrementAndGet(); - results.consumeResult(shardIndex, result); + results.consumeResult(result); if (logger.isTraceEnabled()) { - logger.trace("got first-phase result from {}", result != null ? result.shardTarget() : null); + logger.trace("got first-phase result from {}", result != null ? 
result.getSearchShardTarget() : null); } // clean a previous error on this shard group (note, this code will be serialized on the same shardIndex value level // so its ok concurrency wise to miss potentially the shard failures being created because of another failure // in the #addShardFailure, because by definition, it will happen on *another* shardIndex AtomicArray shardFailures = this.shardFailures.get(); if (shardFailures != null) { - shardFailures.set(shardIndex, null); + shardFailures.set(result.getShardIndex(), null); } } diff --git a/core/src/main/java/org/elasticsearch/action/search/CountedCollector.java b/core/src/main/java/org/elasticsearch/action/search/CountedCollector.java index 65f2d2d280b..2dd255aa14c 100644 --- a/core/src/main/java/org/elasticsearch/action/search/CountedCollector.java +++ b/core/src/main/java/org/elasticsearch/action/search/CountedCollector.java @@ -23,18 +23,20 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import java.util.function.Consumer; + /** * This is a simple base class to simplify fan out to shards and collect their results. Each results passed to - * {@link #onResult(int, SearchPhaseResult, SearchShardTarget)} will be set to the provided result array + * {@link #onResult(SearchPhaseResult)} will be set to the provided result array * where the given index is used to set the result on the array. */ final class CountedCollector { - private final ResultConsumer resultConsumer; + private final Consumer resultConsumer; private final CountDown counter; private final Runnable onFinish; private final SearchPhaseContext context; - CountedCollector(ResultConsumer resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { + CountedCollector(Consumer resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { this.resultConsumer = resultConsumer; this.counter = new CountDown(expectedOps); this.onFinish = onFinish; @@ -55,10 +57,9 @@ final class CountedCollector { /** * Sets the result to the given array index and then runs {@link #countDown()} */ - void onResult(int index, R result, SearchShardTarget target) { + void onResult(R result) { try { - result.shardTarget(target); - resultConsumer.consume(index, result); + resultConsumer.accept(result); } finally { countDown(); } @@ -75,12 +76,4 @@ final class CountedCollector { countDown(); } } - - /** - * A functional interface to plug in shard result consumers to this collector - */ - @FunctionalInterface - public interface ResultConsumer { - void consume(int shardIndex, R result); - } } diff --git a/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 0ac3c69b8eb..353baf11750 100644 --- a/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/core/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -20,16 +20,17 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsSearchResult; import 
org.elasticsearch.search.query.QuerySearchRequest; -import org.elasticsearch.search.query.QuerySearchResultProvider; +import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.Transport; import java.io.IOException; +import java.util.List; import java.util.function.Function; /** @@ -40,16 +41,16 @@ import java.util.function.Function; * @see CountedCollector#onFailure(int, SearchShardTarget, Exception) */ final class DfsQueryPhase extends SearchPhase { - private final InitialSearchPhase.SearchPhaseResults queryResult; + private final InitialSearchPhase.SearchPhaseResults queryResult; private final SearchPhaseController searchPhaseController; private final AtomicArray dfsSearchResults; - private final Function, SearchPhase> nextPhaseFactory; + private final Function, SearchPhase> nextPhaseFactory; private final SearchPhaseContext context; private final SearchTransportService searchTransportService; DfsQueryPhase(AtomicArray dfsSearchResults, SearchPhaseController searchPhaseController, - Function, SearchPhase> nextPhaseFactory, + Function, SearchPhase> nextPhaseFactory, SearchPhaseContext context) { super("dfs_query"); this.queryResult = searchPhaseController.newSearchPhaseResults(context.getRequest(), context.getNumShards()); @@ -64,22 +65,26 @@ final class DfsQueryPhase extends SearchPhase { public void run() throws IOException { // TODO we can potentially also consume the actual per shard results from the initial phase here in the aggregateDfs // to free up memory early - final AggregatedDfs dfs = searchPhaseController.aggregateDfs(dfsSearchResults); - final CountedCollector counter = new CountedCollector<>(queryResult::consumeResult, - dfsSearchResults.asList().size(), - () -> { - context.executeNextPhase(this, nextPhaseFactory.apply(queryResult)); - }, context); - for (final AtomicArray.Entry entry : dfsSearchResults.asList()) { - DfsSearchResult dfsResult = entry.value; - final int shardIndex = entry.index; - final SearchShardTarget searchShardTarget = dfsResult.shardTarget(); + final List resultList = dfsSearchResults.asList(); + final AggregatedDfs dfs = searchPhaseController.aggregateDfs(resultList); + final CountedCollector counter = new CountedCollector<>(queryResult::consumeResult, + resultList.size(), + () -> context.executeNextPhase(this, nextPhaseFactory.apply(queryResult)), context); + for (final DfsSearchResult dfsResult : resultList) { + final SearchShardTarget searchShardTarget = dfsResult.getSearchShardTarget(); Transport.Connection connection = context.getConnection(searchShardTarget.getNodeId()); - QuerySearchRequest querySearchRequest = new QuerySearchRequest(context.getRequest(), dfsResult.id(), dfs); + QuerySearchRequest querySearchRequest = new QuerySearchRequest(context.getRequest(), dfsResult.getRequestId(), dfs); + final int shardIndex = dfsResult.getShardIndex(); searchTransportService.sendExecuteQuery(connection, querySearchRequest, context.getTask(), - ActionListener.wrap( - result -> counter.onResult(shardIndex, result, searchShardTarget), - exception -> { + new SearchActionListener(searchShardTarget, shardIndex) { + + @Override + protected void innerOnResponse(QuerySearchResult response) { + counter.onResult(response); + } + + @Override + public void onFailure(Exception exception) { try { if (context.getLogger().isDebugEnabled()) { context.getLogger().debug((Supplier) () -> new ParameterizedMessage("[{}] Failed to execute query phase", @@ -92,7 +97,8 @@ final class DfsQueryPhase extends SearchPhase { // release it again to be 
in the safe side context.sendReleaseSearchContext(querySearchRequest.id(), connection); } - })); + } + }); } } } diff --git a/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 20d91770675..428053c357b 100644 --- a/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -23,15 +23,14 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.transport.Transport; import java.io.IOException; @@ -45,13 +44,13 @@ import java.util.function.Function; final class FetchSearchPhase extends SearchPhase { private final AtomicArray fetchResults; private final SearchPhaseController searchPhaseController; - private final AtomicArray queryResults; + private final AtomicArray queryResults; private final Function nextPhaseFactory; private final SearchPhaseContext context; private final Logger logger; - private final InitialSearchPhase.SearchPhaseResults resultConsumer; + private final InitialSearchPhase.SearchPhaseResults resultConsumer; - FetchSearchPhase(InitialSearchPhase.SearchPhaseResults resultConsumer, + FetchSearchPhase(InitialSearchPhase.SearchPhaseResults resultConsumer, SearchPhaseController searchPhaseController, SearchPhaseContext context) { this(resultConsumer, searchPhaseController, context, @@ -59,7 +58,7 @@ final class FetchSearchPhase extends SearchPhase { (finalResponse) -> sendResponsePhase(finalResponse, context))); } - FetchSearchPhase(InitialSearchPhase.SearchPhaseResults resultConsumer, + FetchSearchPhase(InitialSearchPhase.SearchPhaseResults resultConsumer, SearchPhaseController searchPhaseController, SearchPhaseContext context, Function nextPhaseFactory) { super("fetch"); @@ -98,35 +97,35 @@ final class FetchSearchPhase extends SearchPhase { private void innerRun() throws IOException { final int numShards = context.getNumShards(); final boolean isScrollSearch = context.getRequest().scroll() != null; - ScoreDoc[] sortedShardDocs = searchPhaseController.sortDocs(isScrollSearch, queryResults); + List phaseResults = queryResults.asList(); + ScoreDoc[] sortedShardDocs = searchPhaseController.sortDocs(isScrollSearch, phaseResults, context.getNumShards()); String scrollId = isScrollSearch ? TransportSearchHelper.buildScrollId(queryResults) : null; - List> queryResultsAsList = queryResults.asList(); final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = resultConsumer.reduce(); final boolean queryAndFetchOptimization = queryResults.length() == 1; final Runnable finishPhase = () -> moveToNextPhase(searchPhaseController, sortedShardDocs, scrollId, reducedQueryPhase, queryAndFetchOptimization ? 
queryResults : fetchResults); if (queryAndFetchOptimization) { - assert queryResults.get(0) == null || queryResults.get(0).fetchResult() != null; + assert phaseResults.isEmpty() || phaseResults.get(0).fetchResult() != null; // query AND fetch optimization finishPhase.run(); } else { final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(numShards, sortedShardDocs); if (sortedShardDocs.length == 0) { // no docs to fetch -- sidestep everything and return - queryResultsAsList.stream() - .map(e -> e.value.queryResult()) + phaseResults.stream() + .map(e -> e.queryResult()) .forEach(this::releaseIrrelevantSearchContext); // we have to release contexts here to free up resources finishPhase.run(); } else { final ScoreDoc[] lastEmittedDocPerShard = isScrollSearch ? searchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase, sortedShardDocs, numShards) : null; - final CountedCollector counter = new CountedCollector<>(fetchResults::set, + final CountedCollector counter = new CountedCollector<>(r -> fetchResults.set(r.getShardIndex(), r), docIdsToLoad.length, // we count down every shard in the result no matter if we got any results or not finishPhase, context); for (int i = 0; i < docIdsToLoad.length; i++) { IntArrayList entry = docIdsToLoad[i]; - QuerySearchResultProvider queryResult = queryResults.get(i); + SearchPhaseResult queryResult = queryResults.get(i); if (entry == null) { // no results for this shard ID if (queryResult != null) { // if we got some hits from this shard we have to release the context there @@ -137,10 +136,10 @@ final class FetchSearchPhase extends SearchPhase { // in any case we count down this result since we don't talk to this shard anymore counter.countDown(); } else { - Transport.Connection connection = context.getConnection(queryResult.shardTarget().getNodeId()); - ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().id(), i, entry, + Transport.Connection connection = context.getConnection(queryResult.getSearchShardTarget().getNodeId()); + ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().getRequestId(), i, entry, lastEmittedDocPerShard); - executeFetch(i, queryResult.shardTarget(), counter, fetchSearchRequest, queryResult.queryResult(), + executeFetch(i, queryResult.getSearchShardTarget(), counter, fetchSearchRequest, queryResult.queryResult(), connection); } } @@ -159,10 +158,10 @@ final class FetchSearchPhase extends SearchPhase { final ShardFetchSearchRequest fetchSearchRequest, final QuerySearchResult querySearchResult, final Transport.Connection connection) { context.getSearchTransport().sendExecuteFetch(connection, fetchSearchRequest, context.getTask(), - new ActionListener() { + new SearchActionListener(shardTarget, shardIndex) { @Override - public void onResponse(FetchSearchResult result) { - counter.onResult(shardIndex, result, shardTarget); + public void innerOnResponse(FetchSearchResult result) { + counter.onResult(result); } @Override @@ -191,8 +190,8 @@ final class FetchSearchPhase extends SearchPhase { // and if it has at lease one hit that didn't make it to the global topDocs if (context.getRequest().scroll() == null && queryResult.hasHits()) { try { - Transport.Connection connection = context.getConnection(queryResult.shardTarget().getNodeId()); - context.sendReleaseSearchContext(queryResult.id(), connection); + Transport.Connection connection = context.getConnection(queryResult.getSearchShardTarget().getNodeId()); + 
context.sendReleaseSearchContext(queryResult.getRequestId(), connection); } catch (Exception e) { context.getLogger().trace("failed to release context", e); } @@ -201,9 +200,9 @@ final class FetchSearchPhase extends SearchPhase { private void moveToNextPhase(SearchPhaseController searchPhaseController, ScoreDoc[] sortedDocs, String scrollId, SearchPhaseController.ReducedQueryPhase reducedQueryPhase, - AtomicArray fetchResultsArr) { + AtomicArray fetchResultsArr) { final InternalSearchResponse internalResponse = searchPhaseController.merge(context.getRequest().scroll() != null, - sortedDocs, reducedQueryPhase, fetchResultsArr); + sortedDocs, reducedQueryPhase, fetchResultsArr.asList(), fetchResultsArr::get); context.executeNextPhase(this, nextPhaseFactory.apply(context.buildSearchResponse(internalResponse, scrollId))); } diff --git a/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java b/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java index f21e9d228d6..be91cebe501 100644 --- a/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java +++ b/core/src/main/java/org/elasticsearch/action/search/InitialSearchPhase.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.routing.GroupShardsIterator; @@ -144,10 +143,11 @@ abstract class InitialSearchPhase extends onShardFailure(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId())); } else { try { - executePhaseOnShard(shardIt, shard, new ActionListener() { + executePhaseOnShard(shardIt, shard, new SearchActionListener(new SearchShardTarget(shard.currentNodeId(), + shardIt.shardId()), shardIndex) { @Override - public void onResponse(FirstResult result) { - onShardResult(shardIndex, shard.currentNodeId(), result, shardIt); + public void innerOnResponse(FirstResult result) { + onShardResult(result, shardIt); } @Override @@ -164,9 +164,10 @@ abstract class InitialSearchPhase extends } } - private void onShardResult(int shardIndex, String nodeId, FirstResult result, ShardIterator shardIt) { - result.shardTarget(new SearchShardTarget(nodeId, shardIt.shardId())); - onShardSuccess(shardIndex, result); + private void onShardResult(FirstResult result, ShardIterator shardIt) { + assert result.getShardIndex() != -1 : "shard index is not set"; + assert result.getSearchShardTarget() != null : "search shard target must not be null"; + onShardSuccess(result); // we need to increment successful ops first before we compare the exit condition otherwise if we // are fast we could concurrently update totalOps but then preempt one of the threads which can // cause the successor to read a wrong value from successfulOps if second phase is very fast ie. count etc. @@ -185,7 +186,7 @@ abstract class InitialSearchPhase extends /** * Executed once all shard results have been received and processed * @see #onShardFailure(int, SearchShardTarget, Exception) - * @see #onShardSuccess(int, SearchPhaseResult) + * @see #onShardSuccess(SearchPhaseResult) */ abstract void onPhaseDone(); // as a tribute to @kimchy aka. 
finishHim() @@ -201,12 +202,10 @@ abstract class InitialSearchPhase extends /** * Executed once for every successful shard level request. - * @param shardIndex the internal index for this shard. Each shard has an index / ordinal assigned that is used to reference - * it's results * @param result the result returned form the shard * */ - abstract void onShardSuccess(int shardIndex, FirstResult result); + abstract void onShardSuccess(FirstResult result); /** * Sends the request to the actual shard. @@ -214,7 +213,7 @@ abstract class InitialSearchPhase extends * @param shard the shard routing to send the request for * @param listener the listener to notify on response */ - protected abstract void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, ActionListener listener); + protected abstract void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, SearchActionListener listener); /** * This class acts as a basic result collection that can be extended to do on-the-fly reduction or result processing @@ -237,17 +236,16 @@ abstract class InitialSearchPhase extends * A stream of all non-null (successful) shard results */ final Stream getSuccessfulResults() { - return results.asList().stream().map(e -> e.value); + return results.asList().stream(); } /** * Consumes a single shard result - * @param shardIndex the shards index, this is a 0-based id that is used to establish a 1 to 1 mapping to the searched shards * @param result the shards result */ - void consumeResult(int shardIndex, Result result) { - assert results.get(shardIndex) == null : "shardIndex: " + shardIndex + " is already set"; - results.set(shardIndex, result); + void consumeResult(Result result) { + assert results.get(result.getShardIndex()) == null : "shardIndex: " + result.getShardIndex() + " is already set"; + results.set(result.getShardIndex(), result); } /** diff --git a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java index 73bc4f2ee7e..aea1aab7d3e 100644 --- a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java +++ b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterConnection.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -59,7 +60,6 @@ import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.Semaphore; @@ -373,10 +373,19 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo // here we pass on the connection since we can only close it once the sendRequest returns otherwise // due to the async nature (it will return before it's actually sent) this can cause the request to fail // due to an already closed connection. 
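The hunk below replaces the plain sendRequest with one that runs under a stashed, system thread context, so the internal cluster-state sniffing neither leaks nor depends on the caller's context. A minimal sketch of the stash-and-restore pattern, assuming a hypothetical sendSniffRequest helper (only the ThreadContext calls are taken from the hunk):

    void sendSniffRequest(TransportService transportService) {
        ThreadContext threadContext = transportService.getThreadPool().getThreadContext();
        try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
            // inside this block the caller's headers and transients are hidden
            threadContext.markAsSystemContext();
            // send the request here; wrapping the response handler via
            // threadContext.newRestorableContext(false) restores the caller's
            // context before handleResponse/handleException run
        } // closing the stored context restores the caller's context
    }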
- transportService.sendRequest(connection, - ClusterStateAction.NAME, request, TransportRequestOptions.EMPTY, + ThreadPool threadPool = transportService.getThreadPool(); + ThreadContext threadContext = threadPool.getThreadContext(); + TransportService.ContextRestoreResponseHandler responseHandler = new TransportService + .ContextRestoreResponseHandler<>(threadContext.newRestorableContext(false), new SniffClusterStateResponseHandler(transportService, connection, listener, seedNodes, cancellableThreads)); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + // we stash any context here since this is an internal execution and should not leak any + // existing context information. + threadContext.markAsSystemContext(); + transportService.sendRequest(connection, ClusterStateAction.NAME, request, TransportRequestOptions.EMPTY, + responseHandler); + } success = true; } finally { if (success == false) { @@ -445,6 +454,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo @Override public void handleResponse(ClusterStateResponse response) { + assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : "context is a system context"; try { try (Closeable theConnection = connection) { // the connection is unused - see comment in #collectRemoteNodes // we have to close this connection before we notify listeners - this is mainly needed for test correctness @@ -483,6 +493,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo @Override public void handleException(TransportException exp) { + assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : "context is a system context"; logger.warn((Supplier) () -> new ParameterizedMessage("fetching nodes from external cluster {} failed", clusterAlias), exp); @@ -505,4 +516,9 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo assert connectHandler.running.availablePermits() == 1; return true; } + + boolean isNodeConnected(final DiscoveryNode node) { + return connectedNodes.contains(node); + } + } diff --git a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java index d9e6862c697..089ce57a114 100644 --- a/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java +++ b/core/src/main/java/org/elasticsearch/action/search/RemoteClusterService.java @@ -26,8 +26,10 @@ import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.PlainShardIterator; import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; @@ -136,7 +138,7 @@ public final class RemoteClusterService extends AbstractComponent implements Clo // nodes can be tagged with node.attr.remote_gateway: true to allow a node to be a gateway node for // cross cluster search String attribute = REMOTE_NODE_ATTRIBUTE.get(settings); - nodePredicate = nodePredicate.and((node) -> 
Boolean.getBoolean(node.getAttributes().getOrDefault(attribute, "false"))); + nodePredicate = nodePredicate.and((node) -> Booleans.parseBoolean(node.getAttributes().getOrDefault(attribute, "false"))); } remoteClusters.putAll(this.remoteClusters); for (Map.Entry> entry : seeds.entrySet()) { @@ -185,6 +187,10 @@ public final class RemoteClusterService extends AbstractComponent implements Clo return remoteClusters.isEmpty() == false; } + boolean isRemoteNodeConnected(final String remoteCluster, final DiscoveryNode node) { + return remoteClusters.get(remoteCluster).isNodeConnected(node); + } + /** * Groups indices per cluster by splitting remote cluster-alias, index-name pairs on {@link #REMOTE_CLUSTER_INDEX_SEPARATOR}. All * indices per cluster are collected as a list in the returned map keyed by the cluster alias. Local indices are grouped under @@ -326,13 +332,20 @@ public final class RemoteClusterService extends AbstractComponent implements Clo } void updateRemoteCluster(String clusterAlias, List addresses) { - updateRemoteClusters(Collections.singletonMap(clusterAlias, addresses.stream().map(address -> { - TransportAddress transportAddress = new TransportAddress(address); - return new DiscoveryNode(clusterAlias + "#" + transportAddress.toString(), - transportAddress, - Version.CURRENT.minimumCompatibilityVersion()); - }).collect(Collectors.toList())), - ActionListener.wrap((x) -> {}, (x) -> {}) ); + updateRemoteCluster(clusterAlias, addresses, ActionListener.wrap((x) -> {}, (x) -> {})); + } + + void updateRemoteCluster( + final String clusterAlias, + final List addresses, + final ActionListener connectionListener) { + final List nodes = addresses.stream().map(address -> { + final TransportAddress transportAddress = new TransportAddress(address); + final String id = clusterAlias + "#" + transportAddress.toString(); + final Version version = Version.CURRENT.minimumCompatibilityVersion(); + return new DiscoveryNode(id, transportAddress, version); + }).collect(Collectors.toList()); + updateRemoteClusters(Collections.singletonMap(clusterAlias, nodes), connectionListener); } static Map> buildRemoteClustersSeeds(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchActionListener.java b/core/src/main/java/org/elasticsearch/action/search/SearchActionListener.java new file mode 100644 index 00000000000..709d1e5e237 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/search/SearchActionListener.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.search; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; + +/** + * A base action listener that ensures the shard target and shard index are set on all responses + * received by this listener. + */ +abstract class SearchActionListener<T extends SearchPhaseResult> implements ActionListener<T> { + private final int requestIndex; + private final SearchShardTarget searchShardTarget; + + protected SearchActionListener(SearchShardTarget searchShardTarget, + int shardIndex) { + assert shardIndex >= 0 : "shard index must be non-negative"; + this.searchShardTarget = searchShardTarget; + this.requestIndex = shardIndex; + } + + @Override + public final void onResponse(T response) { + response.setShardIndex(requestIndex); + setSearchShardTarget(response); + innerOnResponse(response); + } + + protected void setSearchShardTarget(T response) { // some impls need to override this + response.setSearchShardTarget(searchShardTarget); + } + + protected abstract void innerOnResponse(T response); + +} diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index d3b2ea3a98e..7151c8712ed 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -72,7 +72,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSearchResult> protected void executePhaseOnShard( final ShardIterator shardIt, final ShardRouting shard, - final ActionListener<DfsSearchResult> listener) { + final SearchActionListener<DfsSearchResult> listener) { getSearchTransport().sendExecuteDfs(getConnection(shard.currentNodeId()), buildShardSearchRequest(shardIt, shard) , getTask(), listener); } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index 1a21eb3cc34..26c5403f4ab 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -93,8 +93,8 @@ interface SearchPhaseContext extends ActionListener<SearchResponse>, Executor { /** * Releases a search context with the given context ID on the node the given connection is connected to.
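The SearchActionListener added above is meant to be subclassed at each call site: the final onResponse stamps the shard index and shard target before delegating, so implementations only deal with the payload. A sketch of a typical anonymous usage (shardTarget, shardIndex, results, and onShardFailure are assumed surrounding state, not part of this patch):

    SearchActionListener<QuerySearchResult> listener =
            new SearchActionListener<QuerySearchResult>(shardTarget, shardIndex) {
                @Override
                protected void innerOnResponse(QuerySearchResult result) {
                    // getShardIndex() and getSearchShardTarget() are already set here
                    results.consumeResult(result);
                }

                @Override
                public void onFailure(Exception e) {
                    onShardFailure(shardIndex, shardTarget, e);
                }
            };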
- * @see org.elasticsearch.search.query.QuerySearchResult#id() - * @see org.elasticsearch.search.fetch.FetchSearchResult#id() + * @see org.elasticsearch.search.query.QuerySearchResult#getRequestId() + * @see org.elasticsearch.search.fetch.FetchSearchResult#getRequestId() * */ default void sendReleaseSearchContext(long contextId, Transport.Connection connection) { diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index cec44d9e9e5..810530b5507 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -36,10 +36,10 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -52,7 +52,6 @@ import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.profile.SearchProfileShardResults; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; @@ -61,14 +60,16 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.IntFunction; import java.util.stream.Collectors; import java.util.stream.StreamSupport; -public class SearchPhaseController extends AbstractComponent { +public final class SearchPhaseController extends AbstractComponent { private static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0]; @@ -81,13 +82,13 @@ public class SearchPhaseController extends AbstractComponent { this.scriptService = scriptService; } - public AggregatedDfs aggregateDfs(AtomicArray results) { + public AggregatedDfs aggregateDfs(Collection results) { ObjectObjectHashMap termStatistics = HppcMaps.newNoNullKeysMap(); ObjectObjectHashMap fieldStatistics = HppcMaps.newNoNullKeysMap(); long aggMaxDoc = 0; - for (AtomicArray.Entry lEntry : results.asList()) { - final Term[] terms = lEntry.value.terms(); - final TermStatistics[] stats = lEntry.value.termStatistics(); + for (DfsSearchResult lEntry : results) { + final Term[] terms = lEntry.terms(); + final TermStatistics[] stats = lEntry.termStatistics(); assert terms.length == stats.length; for (int i = 0; i < terms.length; i++) { assert terms[i] != null; @@ -105,9 +106,9 @@ public class SearchPhaseController extends AbstractComponent { } - assert !lEntry.value.fieldStatistics().containsKey(null); - final Object[] keys = 
lEntry.value.fieldStatistics().keys; - final Object[] values = lEntry.value.fieldStatistics().values; + assert !lEntry.fieldStatistics().containsKey(null); + final Object[] keys = lEntry.fieldStatistics().keys; + final Object[] values = lEntry.fieldStatistics().values; for (int i = 0; i < keys.length; i++) { if (keys[i] != null) { String key = (String) keys[i]; @@ -127,7 +128,7 @@ public class SearchPhaseController extends AbstractComponent { } } } - aggMaxDoc += lEntry.value.maxDoc(); + aggMaxDoc += lEntry.maxDoc(); } return new AggregatedDfs(termStatistics, fieldStatistics, aggMaxDoc); } @@ -146,10 +147,9 @@ public class SearchPhaseController extends AbstractComponent { * * @param ignoreFrom Whether to ignore the from and sort all hits in each shard result. * Enabled only for scroll search, because that only retrieves hits of length 'size' in the query phase. - * @param resultsArr Shard result holder + * @param results Shard result holder */ - public ScoreDoc[] sortDocs(boolean ignoreFrom, AtomicArray resultsArr) throws IOException { - List> results = resultsArr.asList(); + public ScoreDoc[] sortDocs(boolean ignoreFrom, Collection results, int numShards) throws IOException { if (results.isEmpty()) { return EMPTY_DOCS; } @@ -159,25 +159,25 @@ public class SearchPhaseController extends AbstractComponent { int shardIndex = -1; if (results.size() == 1) { canOptimize = true; - result = results.get(0).value.queryResult(); - shardIndex = results.get(0).index; + result = results.stream().findFirst().get().queryResult(); + shardIndex = result.getShardIndex(); } else { boolean hasResult = false; QuerySearchResult resultToOptimize = null; // lets see if we only got hits from a single shard, if so, we can optimize... - for (AtomicArray.Entry entry : results) { - if (entry.value.queryResult().hasHits()) { + for (SearchPhaseResult entry : results) { + if (entry.queryResult().hasHits()) { if (hasResult) { // we already have one, can't really optimize canOptimize = false; break; } canOptimize = true; hasResult = true; - resultToOptimize = entry.value.queryResult(); - shardIndex = entry.index; + resultToOptimize = entry.queryResult(); + shardIndex = resultToOptimize.getShardIndex(); } } - result = canOptimize ? resultToOptimize : results.get(0).value.queryResult(); + result = canOptimize ? resultToOptimize : results.stream().findFirst().get().queryResult(); assert result != null; } if (canOptimize) { @@ -228,22 +228,21 @@ public class SearchPhaseController extends AbstractComponent { final int from = ignoreFrom ? 
0 : result.queryResult().from(); final TopDocs mergedTopDocs; - final int numShards = resultsArr.length(); if (result.queryResult().topDocs() instanceof CollapseTopFieldDocs) { CollapseTopFieldDocs firstTopDocs = (CollapseTopFieldDocs) result.queryResult().topDocs(); final Sort sort = new Sort(firstTopDocs.fields); final CollapseTopFieldDocs[] shardTopDocs = new CollapseTopFieldDocs[numShards]; fillTopDocs(shardTopDocs, results, new CollapseTopFieldDocs(firstTopDocs.field, 0, new FieldDoc[0], sort.getSort(), new Object[0], Float.NaN)); - mergedTopDocs = CollapseTopFieldDocs.merge(sort, from, topN, shardTopDocs); + mergedTopDocs = CollapseTopFieldDocs.merge(sort, from, topN, shardTopDocs, true); } else if (result.queryResult().topDocs() instanceof TopFieldDocs) { TopFieldDocs firstTopDocs = (TopFieldDocs) result.queryResult().topDocs(); final Sort sort = new Sort(firstTopDocs.fields); - final TopFieldDocs[] shardTopDocs = new TopFieldDocs[resultsArr.length()]; + final TopFieldDocs[] shardTopDocs = new TopFieldDocs[numShards]; fillTopDocs(shardTopDocs, results, new TopFieldDocs(0, new FieldDoc[0], sort.getSort(), Float.NaN)); mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs, true); } else { - final TopDocs[] shardTopDocs = new TopDocs[resultsArr.length()]; + final TopDocs[] shardTopDocs = new TopDocs[numShards]; fillTopDocs(shardTopDocs, results, Lucene.EMPTY_TOP_DOCS); mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs, true); } @@ -251,11 +250,11 @@ public class SearchPhaseController extends AbstractComponent { ScoreDoc[] scoreDocs = mergedTopDocs.scoreDocs; final Map>> groupedCompletionSuggestions = new HashMap<>(); // group suggestions and assign shard index - for (AtomicArray.Entry sortedResult : results) { - Suggest shardSuggest = sortedResult.value.queryResult().suggest(); + for (SearchPhaseResult sortedResult : results) { + Suggest shardSuggest = sortedResult.queryResult().suggest(); if (shardSuggest != null) { for (CompletionSuggestion suggestion : shardSuggest.filter(CompletionSuggestion.class)) { - suggestion.setShardIndex(sortedResult.index); + suggestion.setShardIndex(sortedResult.getShardIndex()); List> suggestions = groupedCompletionSuggestions.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>()); suggestions.add(suggestion); @@ -286,17 +285,16 @@ public class SearchPhaseController extends AbstractComponent { } static void fillTopDocs(T[] shardTopDocs, - List> results, - T empytTopDocs) { + Collection results, T empytTopDocs) { if (results.size() != shardTopDocs.length) { // TopDocs#merge can't deal with null shard TopDocs Arrays.fill(shardTopDocs, empytTopDocs); } - for (AtomicArray.Entry resultProvider : results) { - final T topDocs = (T) resultProvider.value.queryResult().topDocs(); + for (SearchPhaseResult resultProvider : results) { + final T topDocs = (T) resultProvider.queryResult().topDocs(); assert topDocs != null : "top docs must not be null in a valid result"; // the 'index' field is the position in the resultsArr atomic array - shardTopDocs[resultProvider.index] = topDocs; + shardTopDocs[resultProvider.getShardIndex()] = topDocs; } } public ScoreDoc[] getLastEmittedDocPerShard(ReducedQueryPhase reducedQueryPhase, @@ -340,12 +338,11 @@ public class SearchPhaseController extends AbstractComponent { */ public InternalSearchResponse merge(boolean ignoreFrom, ScoreDoc[] sortedDocs, ReducedQueryPhase reducedQueryPhase, - AtomicArray fetchResultsArr) { + Collection fetchResults, IntFunction resultsLookup) { if (reducedQueryPhase.isEmpty()) { 
return InternalSearchResponse.empty(); } - List> fetchResults = fetchResultsArr.asList(); - SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, sortedDocs, fetchResultsArr); + SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, sortedDocs, fetchResults, resultsLookup); if (reducedQueryPhase.suggest != null) { if (!fetchResults.isEmpty()) { int currentOffset = hits.getHits().length; @@ -353,7 +350,7 @@ public class SearchPhaseController extends AbstractComponent { final List suggestionOptions = suggestion.getOptions(); for (int scoreDocIndex = currentOffset; scoreDocIndex < currentOffset + suggestionOptions.size(); scoreDocIndex++) { ScoreDoc shardDoc = sortedDocs[scoreDocIndex]; - QuerySearchResultProvider searchResultProvider = fetchResultsArr.get(shardDoc.shardIndex); + SearchPhaseResult searchResultProvider = resultsLookup.apply(shardDoc.shardIndex); if (searchResultProvider == null) { continue; } @@ -364,7 +361,7 @@ public class SearchPhaseController extends AbstractComponent { CompletionSuggestion.Entry.Option suggestOption = suggestionOptions.get(scoreDocIndex - currentOffset); hit.score(shardDoc.score); - hit.shard(fetchResult.shardTarget()); + hit.shard(fetchResult.getSearchShardTarget()); suggestOption.setHit(hit); } } @@ -377,8 +374,7 @@ public class SearchPhaseController extends AbstractComponent { } private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom, ScoreDoc[] sortedDocs, - AtomicArray fetchResultsArr) { - List> fetchResults = fetchResultsArr.asList(); + Collection fetchResults, IntFunction resultsLookup) { boolean sorted = false; int sortScoreIndex = -1; if (reducedQueryPhase.oneResult.topDocs() instanceof TopFieldDocs) { @@ -396,8 +392,8 @@ public class SearchPhaseController extends AbstractComponent { } } // clean the fetch counter - for (AtomicArray.Entry entry : fetchResults) { - entry.value.fetchResult().initCounter(); + for (SearchPhaseResult entry : fetchResults) { + entry.fetchResult().initCounter(); } int from = ignoreFrom ? 0 : reducedQueryPhase.oneResult.queryResult().from(); int numSearchHits = (int) Math.min(reducedQueryPhase.fetchHits - from, reducedQueryPhase.oneResult.size()); @@ -408,7 +404,7 @@ public class SearchPhaseController extends AbstractComponent { if (!fetchResults.isEmpty()) { for (int i = 0; i < numSearchHits; i++) { ScoreDoc shardDoc = sortedDocs[i]; - QuerySearchResultProvider fetchResultProvider = fetchResultsArr.get(shardDoc.shardIndex); + SearchPhaseResult fetchResultProvider = resultsLookup.apply(shardDoc.shardIndex); if (fetchResultProvider == null) { continue; } @@ -417,7 +413,7 @@ public class SearchPhaseController extends AbstractComponent { if (index < fetchResult.hits().internalHits().length) { SearchHit searchHit = fetchResult.hits().internalHits()[index]; searchHit.score(shardDoc.score); - searchHit.shard(fetchResult.shardTarget()); + searchHit.shard(fetchResult.getSearchShardTarget()); if (sorted) { FieldDoc fieldDoc = (FieldDoc) shardDoc; searchHit.sortValues(fieldDoc.fields, reducedQueryPhase.oneResult.sortValueFormats()); @@ -437,7 +433,7 @@ public class SearchPhaseController extends AbstractComponent { * Reduces the given query results and consumes all aggregations and profile results. 
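Since merge now takes a plain Collection plus an IntFunction lookup instead of the AtomicArray itself, callers adapt at the call site: asList() supplies the non-null shard results and a method reference serves as the shard-index lookup. A sketch under those assumptions (method and variable names are illustrative):

    InternalSearchResponse mergeResults(SearchPhaseController controller, boolean ignoreFrom,
                                        ScoreDoc[] sortedDocs,
                                        SearchPhaseController.ReducedQueryPhase reduced,
                                        AtomicArray<SearchPhaseResult> fetchResults) {
        // asList() yields only the non-null entries; ::get resolves
        // ScoreDoc.shardIndex back to the owning shard's result
        return controller.merge(ignoreFrom, sortedDocs, reduced,
                fetchResults.asList(), fetchResults::get);
    }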
* @param queryResults a list of non-null query shard results */ - public final ReducedQueryPhase reducedQueryPhase(List> queryResults) { + public ReducedQueryPhase reducedQueryPhase(List queryResults) { return reducedQueryPhase(queryResults, null, 0); } @@ -450,7 +446,7 @@ public class SearchPhaseController extends AbstractComponent { * @see QuerySearchResult#consumeAggs() * @see QuerySearchResult#consumeProfileResult() */ - private ReducedQueryPhase reducedQueryPhase(List> queryResults, + private ReducedQueryPhase reducedQueryPhase(Collection queryResults, List bufferdAggs, int numReducePhases) { assert numReducePhases >= 0 : "num reduce phases must be >= 0 but was: " + numReducePhases; numReducePhases++; // increment for this phase @@ -463,7 +459,7 @@ public class SearchPhaseController extends AbstractComponent { return new ReducedQueryPhase(totalHits, fetchHits, maxScore, timedOut, terminatedEarly, null, null, null, null, numReducePhases); } - final QuerySearchResult firstResult = queryResults.get(0).value.queryResult(); + final QuerySearchResult firstResult = queryResults.stream().findFirst().get().queryResult(); final boolean hasSuggest = firstResult.suggest() != null; final boolean hasProfileResults = firstResult.hasProfileResults(); final boolean consumeAggs; @@ -487,8 +483,8 @@ public class SearchPhaseController extends AbstractComponent { final Map> groupedSuggestions = hasSuggest ? new HashMap<>() : Collections.emptyMap(); final Map profileResults = hasProfileResults ? new HashMap<>(queryResults.size()) : Collections.emptyMap(); - for (AtomicArray.Entry entry : queryResults) { - QuerySearchResult result = entry.value.queryResult(); + for (SearchPhaseResult entry : queryResults) { + QuerySearchResult result = entry.queryResult(); if (result.searchTimedOut()) { timedOut = true; } @@ -515,7 +511,7 @@ public class SearchPhaseController extends AbstractComponent { aggregationsList.add((InternalAggregations) result.consumeAggs()); } if (hasProfileResults) { - String key = result.shardTarget().toString(); + String key = result.getSearchShardTarget().toString(); profileResults.put(key, result.consumeProfileResult()); } } @@ -601,7 +597,7 @@ public class SearchPhaseController extends AbstractComponent { /** * Creates a new search response from the given merged hits. - * @see #merge(boolean, ScoreDoc[], ReducedQueryPhase, AtomicArray) + * @see #merge(boolean, ScoreDoc[], ReducedQueryPhase, Collection, IntFunction) */ public InternalSearchResponse buildResponse(SearchHits hits) { return new InternalSearchResponse(hits, aggregations, suggest, shardResults, timedOut, terminatedEarly, numReducePhases); @@ -622,7 +618,7 @@ public class SearchPhaseController extends AbstractComponent { * iff the buffer is exhausted. 
*/ static final class QueryPhaseResultConsumer - extends InitialSearchPhase.SearchPhaseResults { + extends InitialSearchPhase.SearchPhaseResults { private final InternalAggregations[] buffer; private int index; private final SearchPhaseController controller; @@ -649,8 +645,8 @@ public class SearchPhaseController extends AbstractComponent { } @Override - public void consumeResult(int shardIndex, QuerySearchResultProvider result) { - super.consumeResult(shardIndex, result); + public void consumeResult(SearchPhaseResult result) { + super.consumeResult(result); QuerySearchResult queryResult = result.queryResult(); assert queryResult.hasAggs() : "this collector should only be used if aggs are requested"; consumeInternal(queryResult); @@ -691,7 +687,7 @@ public class SearchPhaseController extends AbstractComponent { /** * Returns a new SearchPhaseResults instance. This might return an instance that reduces search responses incrementally. */ - InitialSearchPhase.SearchPhaseResults newSearchPhaseResults(SearchRequest request, int numShards) { + InitialSearchPhase.SearchPhaseResults newSearchPhaseResults(SearchRequest request, int numShards) { SearchSourceBuilder source = request.source(); if (source != null && source.aggregations() != null) { if (request.getBatchedReduceSize() < numShards) { diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index fe87b8f4dba..fd1d1977029 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.transport.Transport; import java.util.Map; @@ -33,7 +33,7 @@ import java.util.concurrent.Executor; import java.util.function.Function; final class SearchQueryThenFetchAsyncAction - extends AbstractSearchAsyncAction { + extends AbstractSearchAsyncAction { private final SearchPhaseController searchPhaseController; @@ -69,11 +69,10 @@ final class SearchQueryThenFetchAsyncAction this.searchPhaseController = searchPhaseController; } - protected void executePhaseOnShard( final ShardIterator shardIt, final ShardRouting shard, - final ActionListener listener) { + final SearchActionListener listener) { getSearchTransport().sendExecuteQuery( getConnection(shard.currentNodeId()), buildShardSearchRequest(shardIt, shard), @@ -83,9 +82,8 @@ final class SearchQueryThenFetchAsyncAction @Override protected SearchPhase getNextPhase( - final SearchPhaseResults results, + final SearchPhaseResults results, final SearchPhaseContext context) { return new FetchSearchPhase(results, searchPhaseController, context); } - } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java index b005c0fc2fe..cda974a33de 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java @@ -32,13 +32,14 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.query.ScrollQuerySearchResult; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.action.search.TransportSearchHelper.internalScrollSearchRequest; -class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction { +final class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction { private final Logger logger; private final SearchPhaseController searchPhaseController; @@ -70,21 +71,17 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction { this.queryFetchResults = new AtomicArray<>(scrollId.getContext().length); } - protected final ShardSearchFailure[] buildShardFailures() { + private ShardSearchFailure[] buildShardFailures() { if (shardFailures == null) { return ShardSearchFailure.EMPTY_ARRAY; } - List> entries = shardFailures.asList(); - ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()]; - for (int i = 0; i < failures.length; i++) { - failures[i] = entries.get(i).value; - } - return failures; + List failures = shardFailures.asList(); + return failures.toArray(new ShardSearchFailure[failures.size()]); } // we do our best to return the shard failures, but its ok if its not fully concurrently safe // we simply try and return as much as possible - protected final void addShardFailure(final int shardIndex, ShardSearchFailure failure) { + private void addShardFailure(final int shardIndex, ShardSearchFailure failure) { if (shardFailures == null) { shardFailures = new AtomicArray<>(scrollId.getContext().length); } @@ -130,15 +127,20 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction { void executePhase(final int shardIndex, DiscoveryNode node, final long searchId) { InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(searchId, request); - searchTransportService.sendExecuteFetch(node, internalRequest, task, new ActionListener() { + searchTransportService.sendExecuteScrollFetch(node, internalRequest, task, + new SearchActionListener(null, shardIndex) { @Override - public void onResponse(ScrollQueryFetchSearchResult result) { - queryFetchResults.set(shardIndex, result.result()); + protected void setSearchShardTarget(ScrollQueryFetchSearchResult response) { + // don't do this - it's part of the response... 
+ assert response.getSearchShardTarget() != null : "search shard target must not be null"; + } + @Override + protected void innerOnResponse(ScrollQueryFetchSearchResult response) { + queryFetchResults.set(response.getShardIndex(), response.result()); if (counter.decrementAndGet() == 0) { finishHim(); } } - @Override public void onFailure(Exception t) { onPhaseFailure(t, searchId, shardIndex); @@ -170,9 +172,10 @@ class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction { } private void innerFinishHim() throws Exception { - ScoreDoc[] sortedShardDocs = searchPhaseController.sortDocs(true, queryFetchResults); + List queryFetchSearchResults = queryFetchResults.asList(); + ScoreDoc[] sortedShardDocs = searchPhaseController.sortDocs(true, queryFetchResults.asList(), queryFetchResults.length()); final InternalSearchResponse internalResponse = searchPhaseController.merge(true, sortedShardDocs, - searchPhaseController.reducedQueryPhase(queryFetchResults.asList()), queryFetchResults); + searchPhaseController.reducedQueryPhase(queryFetchSearchResults), queryFetchSearchResults, queryFetchResults::get); String scrollId = null; if (request.scroll() != null) { scrollId = request.scrollId(); diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java index 13c81c1d5e6..aed234d4a89 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; @@ -41,7 +42,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.action.search.TransportSearchHelper.internalScrollSearchRequest; -class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { +final class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { private final Logger logger; private final SearchTask task; @@ -73,21 +74,17 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { this.fetchResults = new AtomicArray<>(scrollId.getContext().length); } - protected final ShardSearchFailure[] buildShardFailures() { + private ShardSearchFailure[] buildShardFailures() { if (shardFailures == null) { return ShardSearchFailure.EMPTY_ARRAY; } - List> entries = shardFailures.asList(); - ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()]; - for (int i = 0; i < failures.length; i++) { - failures[i] = entries.get(i).value; - } - return failures; + List failures = shardFailures.asList(); + return failures.toArray(new ShardSearchFailure[failures.size()]); } // we do our best to return the shard failures, but its ok if its not fully concurrently safe // we simply try and return as much as possible - protected final void addShardFailure(final int shardIndex, ShardSearchFailure failure) { + private void addShardFailure(final int shardIndex, ShardSearchFailure failure) { if (shardFailures == null) { 
shardFailures = new AtomicArray<>(scrollId.getContext().length); } @@ -99,8 +96,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY)); return; } - final AtomicInteger counter = new AtomicInteger(scrollId.getContext().length); - + final CountDown counter = new CountDown(scrollId.getContext().length); ScrollIdForNode[] context = scrollId.getContext(); for (int i = 0; i < context.length; i++) { ScrollIdForNode target = context[i]; @@ -112,7 +108,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { logger.debug("Node [{}] not available for scroll request [{}]", target.getNode(), scrollId.getSource()); } successfulOps.decrementAndGet(); - if (counter.decrementAndGet() == 0) { + if (counter.countDown()) { try { executeFetchPhase(); } catch (Exception e) { @@ -124,13 +120,21 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { } } - private void executeQueryPhase(final int shardIndex, final AtomicInteger counter, DiscoveryNode node, final long searchId) { + private void executeQueryPhase(final int shardIndex, final CountDown counter, DiscoveryNode node, final long searchId) { InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(searchId, request); - searchTransportService.sendExecuteQuery(node, internalRequest, task, new ActionListener() { + searchTransportService.sendExecuteScrollQuery(node, internalRequest, task, + new SearchActionListener(null, shardIndex) { + @Override - public void onResponse(ScrollQuerySearchResult result) { - queryResults.set(shardIndex, result.queryResult()); - if (counter.decrementAndGet() == 0) { + protected void setSearchShardTarget(ScrollQuerySearchResult response) { + // don't do this - it's part of the response... 
+ assert response.getSearchShardTarget() != null : "search shard target must not be null"; + } + + @Override + protected void innerOnResponse(ScrollQuerySearchResult result) { + queryResults.setOnce(result.getShardIndex(), result.queryResult()); + if (counter.countDown()) { try { executeFetchPhase(); } catch (Exception e) { @@ -146,13 +150,13 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { }); } - void onQueryPhaseFailure(final int shardIndex, final AtomicInteger counter, final long searchId, Exception failure) { + void onQueryPhaseFailure(final int shardIndex, final CountDown counter, final long searchId, Exception failure) { if (logger.isDebugEnabled()) { logger.debug((Supplier) () -> new ParameterizedMessage("[{}] Failed to execute query phase", searchId), failure); } addShardFailure(shardIndex, new ShardSearchFailure(failure)); successfulOps.decrementAndGet(); - if (counter.decrementAndGet() == 0) { + if (counter.countDown()) { if (successfulOps.get() == 0) { listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", failure, buildShardFailures())); } else { @@ -167,7 +171,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { } private void executeFetchPhase() throws Exception { - sortedShardDocs = searchPhaseController.sortDocs(true, queryResults); + sortedShardDocs = searchPhaseController.sortDocs(true, queryResults.asList(), queryResults.length()); if (sortedShardDocs.length == 0) { finishHim(searchPhaseController.reducedQueryPhase(queryResults.asList())); return; @@ -177,21 +181,21 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(queryResults.asList()); final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase, sortedShardDocs, queryResults.length()); - final AtomicInteger counter = new AtomicInteger(docIdsToLoad.length); + final CountDown counter = new CountDown(docIdsToLoad.length); for (int i = 0; i < docIdsToLoad.length; i++) { final int index = i; final IntArrayList docIds = docIdsToLoad[index]; if (docIds != null) { final QuerySearchResult querySearchResult = queryResults.get(index); ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[index]; - ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.id(), docIds, lastEmittedDoc); - DiscoveryNode node = nodes.get(querySearchResult.shardTarget().getNodeId()); - searchTransportService.sendExecuteFetchScroll(node, shardFetchRequest, task, new ActionListener() { + ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.getRequestId(), docIds, lastEmittedDoc); + DiscoveryNode node = nodes.get(querySearchResult.getSearchShardTarget().getNodeId()); + searchTransportService.sendExecuteFetchScroll(node, shardFetchRequest, task, + new SearchActionListener(querySearchResult.getSearchShardTarget(), index) { @Override - public void onResponse(FetchSearchResult result) { - result.shardTarget(querySearchResult.shardTarget()); - fetchResults.set(index, result); - if (counter.decrementAndGet() == 0) { + protected void innerOnResponse(FetchSearchResult response) { + fetchResults.setOnce(response.getShardIndex(), response); + if (counter.countDown()) { finishHim(reducedQueryPhase); } } @@ -202,14 +206,14 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { logger.debug("Failed to execute fetch phase", t); } 
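The scroll hunks in this area swap AtomicInteger decrement-and-compare for CountDown, whose countDown() returns true only for the single call that reaches zero, so the finish path cannot run twice. A sketch (finish() is a hypothetical stand-in):

    void onShardDone(CountDown counter) {
        if (counter.countDown()) {
            finish(); // calls after zero return false, so this runs at most once
        }
    }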
successfulOps.decrementAndGet(); - if (counter.decrementAndGet() == 0) { + if (counter.countDown()) { finishHim(reducedQueryPhase); } } }); } else { // the counter is set to the total size of docIdsToLoad which can have null values so we have to count them down too - if (counter.decrementAndGet() == 0) { + if (counter.countDown()) { finishHim(reducedQueryPhase); } } @@ -218,7 +222,8 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { private void finishHim(SearchPhaseController.ReducedQueryPhase queryPhase) { try { - final InternalSearchResponse internalResponse = searchPhaseController.merge(true, sortedShardDocs, queryPhase, fetchResults); + final InternalSearchResponse internalResponse = searchPhaseController.merge(true, sortedShardDocs, queryPhase, + fetchResults.asList(), fetchResults::get); String scrollId = null; if (request.scroll() != null) { scrollId = request.scrollId(); diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 4ebf7c79c2a..80583e24c9c 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult; @@ -42,7 +43,6 @@ import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.query.ScrollQuerySearchResult; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -118,17 +118,17 @@ public class SearchTransportService extends AbstractLifecycleComponent { } public void sendExecuteDfs(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, - final ActionListener listener) { + final SearchActionListener listener) { transportService.sendChildRequest(connection, DFS_ACTION_NAME, request, task, new ActionListenerResponseHandler<>(listener, DfsSearchResult::new)); } public void sendExecuteQuery(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, - final ActionListener listener) { + final SearchActionListener listener) { // we optimize this and expect a QueryFetchSearchResult if we only have a single shard in the search request // this used to be the QUERY_AND_FETCH which doesn't exists anymore. final boolean fetchDocuments = request.numberOfShards() == 1; - Supplier supplier = fetchDocuments ? QueryFetchSearchResult::new : QuerySearchResult::new; + Supplier supplier = fetchDocuments ? 
QueryFetchSearchResult::new : QuerySearchResult::new; if (connection.getVersion().onOrBefore(Version.V_5_3_0_UNRELEASED) && fetchDocuments) { // TODO this BWC layer can be removed once this is back-ported to 5.3 transportService.sendChildRequest(connection, QUERY_FETCH_ACTION_NAME, request, task, @@ -140,35 +140,35 @@ public class SearchTransportService extends AbstractLifecycleComponent { } public void sendExecuteQuery(Transport.Connection connection, final QuerySearchRequest request, SearchTask task, - final ActionListener listener) { + final SearchActionListener listener) { transportService.sendChildRequest(connection, QUERY_ID_ACTION_NAME, request, task, new ActionListenerResponseHandler<>(listener, QuerySearchResult::new)); } - public void sendExecuteQuery(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task, - final ActionListener listener) { + public void sendExecuteScrollQuery(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task, + final SearchActionListener listener) { transportService.sendChildRequest(transportService.getConnection(node), QUERY_SCROLL_ACTION_NAME, request, task, new ActionListenerResponseHandler<>(listener, ScrollQuerySearchResult::new)); } - public void sendExecuteFetch(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task, - final ActionListener listener) { + public void sendExecuteScrollFetch(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task, + final SearchActionListener listener) { transportService.sendChildRequest(transportService.getConnection(node), QUERY_FETCH_SCROLL_ACTION_NAME, request, task, new ActionListenerResponseHandler<>(listener, ScrollQueryFetchSearchResult::new)); } public void sendExecuteFetch(Transport.Connection connection, final ShardFetchSearchRequest request, SearchTask task, - final ActionListener listener) { + final SearchActionListener listener) { sendExecuteFetch(connection, FETCH_ID_ACTION_NAME, request, task, listener); } public void sendExecuteFetchScroll(DiscoveryNode node, final ShardFetchRequest request, SearchTask task, - final ActionListener listener) { + final SearchActionListener listener) { sendExecuteFetch(transportService.getConnection(node), FETCH_ID_SCROLL_ACTION_NAME, request, task, listener); } private void sendExecuteFetch(Transport.Connection connection, String action, final ShardFetchRequest request, SearchTask task, - final ActionListener listener) { + final SearchActionListener listener) { transportService.sendChildRequest(connection, action, request, task, new ActionListenerResponseHandler<>(listener, FetchSearchResult::new)); } @@ -327,7 +327,7 @@ public class SearchTransportService extends AbstractLifecycleComponent { new TaskAwareTransportRequestHandler() { @Override public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - QuerySearchResultProvider result = searchService.executeQueryPhase(request, (SearchTask)task); + SearchPhaseResult result = searchService.executeQueryPhase(request, (SearchTask)task); channel.sendResponse(result); } }); @@ -361,7 +361,7 @@ public class SearchTransportService extends AbstractLifecycleComponent { @Override public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { assert request.numberOfShards() == 1 : "expected single shard request but got: " + request.numberOfShards(); - QuerySearchResultProvider result = 
searchService.executeQueryPhase(request, (SearchTask)task); + SearchPhaseResult result = searchService.executeQueryPhase(request, (SearchTask)task); channel.sendResponse(result); } }); diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index 975c0be6f0f..e494bb6768d 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -38,10 +38,9 @@ final class TransportSearchHelper { try (RAMOutputStream out = new RAMOutputStream()) { out.writeString(searchPhaseResults.length() == 1 ? ParsedScrollId.QUERY_AND_FETCH_TYPE : ParsedScrollId.QUERY_THEN_FETCH_TYPE); out.writeVInt(searchPhaseResults.asList().size()); - for (AtomicArray.Entry entry : searchPhaseResults.asList()) { - SearchPhaseResult searchPhaseResult = entry.value; - out.writeLong(searchPhaseResult.id()); - out.writeString(searchPhaseResult.shardTarget().getNodeId()); + for (SearchPhaseResult searchPhaseResult : searchPhaseResults.asList()) { + out.writeLong(searchPhaseResult.getRequestId()); + out.writeString(searchPhaseResult.getSearchShardTarget().getNodeId()); } byte[] bytes = new byte[(int) out.getFilePointer()]; out.writeTo(bytes, 0); diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java index 10f8741eccc..ae4ae78c033 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java @@ -22,16 +22,12 @@ package org.elasticsearch.action.support.replication; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteResponse; -import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; @@ -46,7 +42,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; -import org.apache.logging.log4j.core.pattern.ConverterKeys; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -302,15 +297,21 @@ public abstract class TransportWriteAction< } void run() { - // we either respond immediately ie. if we we don't fsync per request or wait for refresh - // OR we got an pass async operations on and wait for them to return to respond. - indexShard.maybeFlush(); - maybeFinish(); // decrement the pendingOpts by one, if there is nothing else to do we just respond with success. 
+ /* + * We either respond immediately (i.e., if we do not fsync per request or wait for + * refresh), or we pass the async operations on and wait for them to return + * before we respond. + */ + indexShard.afterWriteOperation(); + // decrement pending by one, if there is nothing else to do we just respond with success + maybeFinish(); if (waitUntilRefresh) { assert pendingOps.get() > 0; indexShard.addRefreshListener(location, forcedRefresh -> { if (forcedRefresh) { - logger.warn("block_until_refresh request ran out of slots and forced a refresh: [{}]", request); + logger.warn( + "block until refresh ran out of slots and forced a refresh: [{}]", + request); } refreshed.set(forcedRefresh); maybeFinish(); diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index 9d1cf5e37e7..86d158784c0 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -135,14 +135,14 @@ public abstract class TransportTasksAction< } List results = new ArrayList<>(); List exceptions = new ArrayList<>(); - for (AtomicArray.Entry> response : responses.asList()) { - if (response.value.v1() == null) { - assert response.value.v2() != null; + for (Tuple response : responses.asList()) { + if (response.v1() == null) { + assert response.v2() != null; exceptions.add(new TaskOperationFailure(clusterService.localNode().getId(), tasks.get(taskIndex).getId(), - response.value.v2())); + response.v2())); } else { - assert response.value.v2() == null; - results.add(response.value.v1()); + assert response.v2() == null; + results.add(response.v1()); } } listener.onResponse(new NodeTasksResponse(clusterService.localNode().getId(), results, exceptions)); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 7f5482afb0d..756ff80ddad 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -122,6 +122,7 @@ public class UpdateHelper extends AbstractComponent { .setRefreshPolicy(request.getRefreshPolicy()) .routing(request.routing()) .parent(request.parent()) + .timeout(request.timeout()) .waitForActiveShards(request.waitForActiveShards()); if (request.versionType() != VersionType.INTERNAL) { // in all but the internal versioning mode, we want to create the new document using the given version.
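The hunk above adds the caller's timeout to the upsert request; the next hunk does the same for the index and delete requests produced by scripted updates, which previously fell back to the default timeout. A sketch of the effect (names mirror the surrounding code but the snippet is illustrative):

    IndexRequest deriveUpsert(UpdateRequest request, Map<String, Object> source) {
        return new IndexRequest(request.index(), request.type(), request.id())
                .source(source)
                .timeout(request.timeout()) // now carried over from the update request
                .waitForActiveShards(request.waitForActiveShards());
    }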
@@ -188,12 +189,14 @@ public class UpdateHelper extends AbstractComponent { .source(updatedSourceAsMap, updateSourceContentType) .version(updateVersion).versionType(request.versionType()) .waitForActiveShards(request.waitForActiveShards()) + .timeout(request.timeout()) .setRefreshPolicy(request.getRefreshPolicy()); return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); } else if ("delete".equals(operation)) { DeleteRequest deleteRequest = Requests.deleteRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent) .version(updateVersion).versionType(request.versionType()) .waitForActiveShards(request.waitForActiveShards()) + .timeout(request.timeout()) .setRefreshPolicy(request.getRefreshPolicy()); return new Result(deleteRequest, DocWriteResponse.Result.DELETED, updatedSourceAsMap, updateSourceContentType); } else if ("none".equals(operation)) { diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index 7e82852a9f3..3d371f0f23f 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.process.ProcessProbe; import org.elasticsearch.node.Node; @@ -73,7 +74,7 @@ final class BootstrapChecks { final List combinedChecks = new ArrayList<>(builtInChecks); combinedChecks.addAll(additionalChecks); check( - enforceLimits(boundTransportAddress), + enforceLimits(boundTransportAddress, DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings)), Collections.unmodifiableList(combinedChecks), Node.NODE_NAME_SETTING.get(settings)); } @@ -164,13 +165,16 @@ final class BootstrapChecks { * Tests if the checks should be enforced. 
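With the discoveryType argument documented in the javadoc that continues below, a node bound to a non-loopback address still skips enforcement when it runs with single-node discovery. A sketch of the resulting behavior (boundAddress is an assumed fixture):

    boolean enforced = BootstrapChecks.enforceLimits(boundAddress, "single-node");
    assert enforced == false; // "single-node" short-circuits the address check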
* * @param boundTransportAddress the node network bindings + * @param discoveryType the discovery type * @return {@code true} if the checks should be enforced */ - static boolean enforceLimits(final BoundTransportAddress boundTransportAddress) { - Predicate isLoopbackOrLinkLocalAddress = + static boolean enforceLimits(final BoundTransportAddress boundTransportAddress, final String discoveryType) { + final Predicate isLoopbackOrLinkLocalAddress = t -> t.address().getAddress().isLinkLocalAddress() || t.address().getAddress().isLoopbackAddress(); - return !(Arrays.stream(boundTransportAddress.boundAddresses()).allMatch(isLoopbackOrLinkLocalAddress) && + final boolean bound = + !(Arrays.stream(boundTransportAddress.boundAddresses()).allMatch(isLoopbackOrLinkLocalAddress) && isLoopbackOrLinkLocalAddress.test(boundTransportAddress.publishAddress())); + return bound && !"single-node".equals(discoveryType); } // the list of checks to execute @@ -195,6 +199,7 @@ final class BootstrapChecks { checks.add(new SystemCallFilterCheck(BootstrapSettings.SYSTEM_CALL_FILTER_SETTING.get(settings))); checks.add(new OnErrorCheck()); checks.add(new OnOutOfMemoryErrorCheck()); + checks.add(new EarlyAccessCheck()); checks.add(new G1GCCheck()); return Collections.unmodifiableList(checks); } @@ -577,6 +582,34 @@ final class BootstrapChecks { } + /** + * Bootstrap check for early-access builds from OpenJDK. + */ + static class EarlyAccessCheck implements BootstrapCheck { + + @Override + public boolean check() { + return "Oracle Corporation".equals(jvmVendor()) && javaVersion().endsWith("-ea"); + } + + String jvmVendor() { + return Constants.JVM_VENDOR; + } + + String javaVersion() { + return Constants.JAVA_VERSION; + } + + @Override + public String errorMessage() { + return String.format( + Locale.ROOT, + "Java version [%s] is an early-access build, only use release builds", + javaVersion()); + } + + } + /** * Bootstrap check for versions of HotSpot that are known to have issues that can lead to index corruption when G1GC is enabled. */ diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Spawner.java b/core/src/main/java/org/elasticsearch/bootstrap/Spawner.java index 44cf2d2b0aa..53983cb472e 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Spawner.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Spawner.java @@ -19,9 +19,10 @@ package org.elasticsearch.bootstrap; -import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.PluginInfo; +import org.elasticsearch.plugins.Platforms; import java.io.Closeable; import java.io.IOException; @@ -32,97 +33,89 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.concurrent.atomic.AtomicBoolean; /** - * Spawns native plugin controller processes if present. Will only work prior to a system call filter being installed. + * Spawns native plugin controller processes if present. Will only work prior to a system call + * filter being installed. */ final class Spawner implements Closeable { - private static final String PROGRAM_NAME = Constants.WINDOWS ? "controller.exe" : "controller"; - private static final String PLATFORM_NAME = makePlatformName(Constants.OS_NAME, Constants.OS_ARCH); - private static final String TMP_ENVVAR = "TMPDIR"; - - /** + /* * References to the processes that have been spawned, so that we can destroy them. 
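
Taken together with the `single-node` discovery type registered later in this change, the reworked `enforceLimits` means the bootstrap checks are never enforced for a single-node cluster, even when it binds to a non-loopback address. A sketch of node settings that would exercise this path (values illustrative):

    import org.elasticsearch.common.settings.Settings;

    class SingleNodeBootstrapSketch {
        static Settings singleNodeSettings() {
            // with this discovery type, enforceLimits(...) returns false, so
            // bootstrap check failures are logged instead of aborting startup
            return Settings.builder()
                    .put("discovery.type", "single-node")
                    .put("network.host", "0.0.0.0") // would otherwise trigger enforcement
                    .build();
        }
    }
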
*/ private final List processes = new ArrayList<>(); + private AtomicBoolean spawned = new AtomicBoolean(); @Override public void close() throws IOException { - try { - IOUtils.close(() -> processes.stream().map(s -> (Closeable)s::destroy).iterator()); - } finally { - processes.clear(); - } + IOUtils.close(() -> processes.stream().map(s -> (Closeable) s::destroy).iterator()); } /** - * For each plugin, attempt to spawn the controller daemon. Silently ignore any plugins - * that don't include a controller for the correct platform. + * Spawns the native controllers for each plugin. + * + * @param environment the node environment + * @throws IOException if an I/O error occurs reading the plugins or spawning a native process */ - void spawnNativePluginControllers(Environment environment) throws IOException { - if (Files.exists(environment.pluginsFile())) { - try (DirectoryStream stream = Files.newDirectoryStream(environment.pluginsFile())) { - for (Path plugin : stream) { - Path spawnPath = makeSpawnPath(plugin); - if (Files.isRegularFile(spawnPath)) { - spawnNativePluginController(spawnPath, environment.tmpFile()); - } + void spawnNativePluginControllers(final Environment environment) throws IOException { + if (!spawned.compareAndSet(false, true)) { + throw new IllegalStateException("native controllers already spawned"); + } + final Path pluginsFile = environment.pluginsFile(); + if (!Files.exists(pluginsFile)) { + throw new IllegalStateException("plugins directory [" + pluginsFile + "] not found"); + } + /* + * For each plugin, attempt to spawn the controller daemon. Silently ignore any plugins that + * don't include a controller for the correct platform. + */ + try (DirectoryStream stream = Files.newDirectoryStream(pluginsFile)) { + for (final Path plugin : stream) { + final PluginInfo info = PluginInfo.readFromProperties(plugin); + final Path spawnPath = Platforms.nativeControllerPath(plugin); + if (!Files.isRegularFile(spawnPath)) { + continue; } + if (!info.hasNativeController()) { + final String message = String.format( + Locale.ROOT, + "plugin [%s] does not have permission to fork native controller", + plugin.getFileName()); + throw new IllegalArgumentException(message); + } + final Process process = + spawnNativePluginController(spawnPath, environment.tmpFile()); + processes.add(process); } } } /** - * Attempt to spawn the controller daemon for a given plugin. The spawned process - * will remain connected to this JVM via its stdin, stdout and stderr, but the - * references to these streams are not available to code outside this package. + * Attempt to spawn the controller daemon for a given plugin. The spawned process will remain + * connected to this JVM via its stdin, stdout, and stderr streams, but the references to these + * streams are not available to code outside this package.
*/ - private void spawnNativePluginController(Path spawnPath, Path tmpPath) throws IOException { - ProcessBuilder pb = new ProcessBuilder(spawnPath.toString()); + private Process spawnNativePluginController( + final Path spawnPath, + final Path tmpPath) throws IOException { + final ProcessBuilder pb = new ProcessBuilder(spawnPath.toString()); - // The only environment variable passes on the path to the temporary directory + // the only environment variable passed on is the path to the temporary directory pb.environment().clear(); - pb.environment().put(TMP_ENVVAR, tmpPath.toString()); + pb.environment().put("TMPDIR", tmpPath.toString()); - // The output stream of the Process object corresponds to the daemon's stdin - processes.add(pb.start()); + // the output stream of the process object corresponds to the daemon's stdin + return pb.start(); } + /** + * The collection of processes representing spawned native controllers. + * + * @return the processes + */ List getProcesses() { return Collections.unmodifiableList(processes); } - /** - * Make the full path to the program to be spawned. - */ - static Path makeSpawnPath(Path plugin) { - return plugin.resolve("platform").resolve(PLATFORM_NAME).resolve("bin").resolve(PROGRAM_NAME); - } - - /** - * Make the platform name in the format used in Kibana downloads, for example: - * - darwin-x86_64 - * - linux-x86-64 - * - windows-x86_64 - * For *nix platforms this is more-or-less `uname -s`-`uname -m` converted to lower case. - * However, for consistency between different operating systems on the same architecture - * "amd64" is replaced with "x86_64" and "i386" with "x86". - * For Windows it's "windows-" followed by either "x86" or "x86_64". - */ - static String makePlatformName(String osName, String osArch) { - String os = osName.toLowerCase(Locale.ROOT); - if (os.startsWith("windows")) { - os = "windows"; - } else if (os.equals("mac os x")) { - os = "darwin"; - } - String cpu = osArch.toLowerCase(Locale.ROOT); - if (cpu.equals("amd64")) { - cpu = "x86_64"; - } else if (cpu.equals("i386")) { - cpu = "x86"; - } - return os + "-" + cpu; - } } diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index 0cf22d7a2c4..663b820dc39 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -30,6 +30,10 @@ import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.explain.ExplainRequestBuilder; import org.elasticsearch.action.explain.ExplainResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequestBuilder; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldstats.FieldStatsRequest; import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder; import org.elasticsearch.action.fieldstats.FieldStatsResponse; @@ -458,6 +462,21 @@ public interface Client extends ElasticsearchClient, Releasable { void fieldStats(FieldStatsRequest request, ActionListener listener); + /** + * Builder for the field capabilities request.
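
For reference, the spawn step above boils down to roughly the following, a sketch assuming the on-disk layout documented in the removed makePlatformName comment (plugin name and paths are illustrative):

    import java.io.IOException;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    class SpawnSketch {
        static Process spawnController() throws IOException {
            // plugins/<plugin>/platform/<os>-<arch>/bin/controller
            Path controller = Paths.get("plugins", "my-plugin", "platform",
                    "linux-x86_64", "bin", "controller");
            ProcessBuilder pb = new ProcessBuilder(controller.toString());
            pb.environment().clear();                  // the daemon inherits no environment...
            pb.environment().put("TMPDIR", "/tmp/es"); // ...except the temporary directory
            return pb.start();                         // stdio stays attached to this JVM
        }
    }
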
+ */ + FieldCapabilitiesRequestBuilder prepareFieldCaps(); + + /** + * An action that returns the field capabilities from the provided request + */ + ActionFuture fieldCaps(FieldCapabilitiesRequest request); + + /** + * An action that returns the field capabilities from the provided request + */ + void fieldCaps(FieldCapabilitiesRequest request, ActionListener listener); + /** * Returns this clients settings */ diff --git a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index 03fdee2db34..b254039910c 100644 --- a/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -50,6 +50,9 @@ import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRespon import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequest; import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequestBuilder; import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequestBuilder; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushResponse; @@ -817,5 +820,4 @@ public interface IndicesAdminClient extends ElasticsearchClient { * Swaps the index pointed to by an alias given all provided conditions are satisfied */ void rolloverIndex(RolloverRequest request, ActionListener listener); - } diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index e0ebcfe70a9..726875a6d5c 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -272,6 +272,10 @@ import org.elasticsearch.action.explain.ExplainAction; import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.explain.ExplainRequestBuilder; import org.elasticsearch.action.explain.ExplainResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequestBuilder; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.fieldstats.FieldStatsRequest; import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder; @@ -667,6 +671,21 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new FieldStatsRequestBuilder(this, FieldStatsAction.INSTANCE); } + @Override + public void fieldCaps(FieldCapabilitiesRequest request, ActionListener listener) { + execute(FieldCapabilitiesAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture fieldCaps(FieldCapabilitiesRequest request) { + return execute(FieldCapabilitiesAction.INSTANCE, request); + } + + @Override + public FieldCapabilitiesRequestBuilder prepareFieldCaps() { + return new FieldCapabilitiesRequestBuilder(this, FieldCapabilitiesAction.INSTANCE); + } + static class Admin implements AdminClient { private 
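
A hedged usage sketch of the new field capabilities entry points; the builder's setter name is assumed here, since it is not shown in this diff:

    import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
    import org.elasticsearch.client.Client;

    class FieldCapsSketch {
        static FieldCapabilitiesResponse fieldCaps(Client client) {
            // asks the cluster which capabilities (searchable, aggregatable, ...)
            // the given fields have across indices
            return client.prepareFieldCaps()
                    .setFields("title", "body") // assumed setter on the request builder
                    .get();
        }
    }
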
final ClusterAdmin clusterAdmin; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 68575634a1e..713fce2848f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -183,7 +183,7 @@ public class IndexMetaData implements Diffable, ToXContent { throw new IllegalArgumentException("es.index.max_number_of_shards must be > 0"); } return Setting.intSetting(SETTING_NUMBER_OF_SHARDS, Math.min(5, maxNumShards), 1, maxNumShards, - Property.IndexScope); + Property.IndexScope, Property.Final); } public static final String INDEX_SETTING_PREFIX = "index."; @@ -226,7 +226,6 @@ public class IndexMetaData implements Diffable, ToXContent { public static final String SETTING_VERSION_CREATED_STRING = "index.version.created_string"; public static final String SETTING_VERSION_UPGRADED = "index.version.upgraded"; public static final String SETTING_VERSION_UPGRADED_STRING = "index.version.upgraded_string"; - public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible"; public static final String SETTING_CREATION_DATE = "index.creation_date"; /** * The user provided name for an index. This is the plain string provided by the user when the index was created. @@ -311,7 +310,6 @@ public class IndexMetaData implements Diffable, ToXContent { private final Version indexCreatedVersion; private final Version indexUpgradedVersion; - private final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; private final ActiveShardCount waitForActiveShards; @@ -319,7 +317,7 @@ public class IndexMetaData implements Diffable, ToXContent { ImmutableOpenMap mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs, ImmutableOpenIntMap> inSyncAllocationIds, DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters initialRecoveryFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters, - Version indexCreatedVersion, Version indexUpgradedVersion, org.apache.lucene.util.Version minimumCompatibleLuceneVersion, + Version indexCreatedVersion, Version indexUpgradedVersion, int routingNumShards, int routingPartitionSize, ActiveShardCount waitForActiveShards) { this.index = index; @@ -341,7 +339,6 @@ public class IndexMetaData implements Diffable, ToXContent { this.initialRecoveryFilters = initialRecoveryFilters; this.indexCreatedVersion = indexCreatedVersion; this.indexUpgradedVersion = indexUpgradedVersion; - this.minimumCompatibleLuceneVersion = minimumCompatibleLuceneVersion; this.routingNumShards = routingNumShards; this.routingFactor = routingNumShards / numberOfShards; this.routingPartitionSize = routingPartitionSize; @@ -401,13 +398,6 @@ public class IndexMetaData implements Diffable, ToXContent { return indexUpgradedVersion; } - /** - * Return the {@link org.apache.lucene.util.Version} of the oldest lucene segment in the index - */ - public org.apache.lucene.util.Version getMinimumCompatibleVersion() { - return minimumCompatibleLuceneVersion; - } - public long getCreationDate() { return settings.getAsLong(SETTING_CREATION_DATE, -1L); } @@ -1052,17 +1042,6 @@ public class IndexMetaData implements Diffable, ToXContent { } Version indexCreatedVersion = Version.indexCreated(settings); Version indexUpgradedVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, indexCreatedVersion); - String stringLuceneVersion = 
settings.get(SETTING_VERSION_MINIMUM_COMPATIBLE); - final org.apache.lucene.util.Version minimumCompatibleLuceneVersion; - if (stringLuceneVersion != null) { - try { - minimumCompatibleLuceneVersion = org.apache.lucene.util.Version.parse(stringLuceneVersion); - } catch (ParseException ex) { - throw new IllegalStateException("Cannot parse lucene version [" + stringLuceneVersion + "] in the [" + SETTING_VERSION_MINIMUM_COMPATIBLE + "] setting", ex); - } - } else { - minimumCompatibleLuceneVersion = null; - } if (primaryTerms == null) { initializePrimaryTerms(); @@ -1081,7 +1060,7 @@ public class IndexMetaData implements Diffable, ToXContent { final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE); return new IndexMetaData(new Index(index, uuid), version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledInSyncAllocationIds.build(), requireFilters, initialRecoveryFilters, includeFilters, excludeFilters, - indexCreatedVersion, indexUpgradedVersion, minimumCompatibleLuceneVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards); + indexCreatedVersion, indexUpgradedVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards); } public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 0bde4a23b03..f23915be15d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -603,9 +603,6 @@ public class MetaDataCreateIndexService extends AbstractComponent { .put(IndexMetaData.SETTING_ROUTING_PARTITION_SIZE, sourceMetaData.getRoutingPartitionSize()) .put(IndexMetaData.INDEX_SHRINK_SOURCE_NAME.getKey(), shrinkFromIndex.getName()) .put(IndexMetaData.INDEX_SHRINK_SOURCE_UUID.getKey(), shrinkFromIndex.getUUID()); - if (sourceMetaData.getMinimumCompatibleVersion() != null) { - indexSettingsBuilder.put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, sourceMetaData.getMinimumCompatibleVersion()); - } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index ae9eb0d1b16..ac172ab661a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -165,10 +165,6 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements indexScopedSettings.validate(normalizedSettings); // never allow to change the number of shards for (Map.Entry entry : normalizedSettings.getAsMap().entrySet()) { - if (entry.getKey().equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) { - listener.onFailure(new IllegalArgumentException("can't change the number of shards for an index")); - return; - } Setting setting = indexScopedSettings.get(entry.getKey()); assert setting != null; // we already validated the normalized settings settingsForClosedIndices.put(entry.getKey(), entry.getValue()); @@ -329,7 +325,6 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements // No reason to pollute the settings, 
we didn't really upgrade anything metaDataBuilder.put(IndexMetaData.builder(indexMetaData) .settings(Settings.builder().put(indexMetaData.getSettings()) - .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, entry.getValue().v2()) .put(IndexMetaData.SETTING_VERSION_UPGRADED, entry.getValue().v1()) ) ); diff --git a/core/src/main/java/org/elasticsearch/common/joda/Joda.java b/core/src/main/java/org/elasticsearch/common/joda/Joda.java index 7978ceff48c..c9eaa9ab3aa 100644 --- a/core/src/main/java/org/elasticsearch/common/joda/Joda.java +++ b/core/src/main/java/org/elasticsearch/common/joda/Joda.java @@ -333,9 +333,10 @@ public class Joda { boolean isPositive = text.startsWith("-") == false; boolean isTooLong = text.length() > estimateParsedLength(); - if ((isPositive && isTooLong) || - // timestamps have to have UTC timezone - bucket.getZone() != DateTimeZone.UTC) { + if (bucket.getZone() != DateTimeZone.UTC) { + String format = hasMilliSecondPrecision ? "epoch_millis" : "epoch_second"; + throw new IllegalArgumentException("time_zone must be UTC for format [" + format + "]"); + } else if (isPositive && isTooLong) { return -1; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index bbcea3041fc..bf448b61539 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -57,6 +57,8 @@ import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -552,7 +554,22 @@ public class Lucene { SortField newSortField = new SortField(sortField.getField(), SortField.Type.DOUBLE); newSortField.setMissingValue(sortField.getMissingValue()); sortField = newSortField; + } else if (sortField.getClass() == SortedSetSortField.class) { + // for a multi-valued sort field, we replace the SortedSetSortField with a simple SortField. + // It works because the sort field is only used to merge results from different shards. + SortField newSortField = new SortField(sortField.getField(), SortField.Type.STRING, sortField.getReverse()); + newSortField.setMissingValue(sortField.getMissingValue()); + sortField = newSortField; + } else if (sortField.getClass() == SortedNumericSortField.class) { + // for a multi-valued sort field, we replace the SortedNumericSortField with a simple SortField. + // It works because the sort field is only used to merge results from different shards.
+ SortField newSortField = new SortField(sortField.getField(), + ((SortedNumericSortField) sortField).getNumericType(), + sortField.getReverse()); + newSortField.setMissingValue(sortField.getMissingValue()); + sortField = newSortField; } + if (sortField.getClass() != SortField.class) { throw new IllegalArgumentException("Cannot serialize SortField impl [" + sortField + "]"); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 65493b93cda..05b7d96c8f6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -35,8 +35,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -382,11 +380,19 @@ public abstract class AbstractScopedSettings extends AbstractComponent { /** * Returns true if the setting for the given key is dynamically updateable. Otherwise false. */ - public boolean hasDynamicSetting(String key) { + public boolean isDynamicSetting(String key) { final Setting setting = get(key); return setting != null && setting.isDynamic(); } + /** + * Returns true if the setting for the given key is final. Otherwise false. + */ + public boolean isFinalSetting(String key) { + final Setting setting = get(key); + return setting != null && setting.isFinal(); + } + /** * Returns a settings object that contains all settings that are not * already set in the given source. The diff contains either the default value for each @@ -465,11 +471,14 @@ public abstract class AbstractScopedSettings extends AbstractComponent { boolean changed = false; final Set toRemove = new HashSet<>(); Settings.Builder settingsBuilder = Settings.builder(); - final Predicate canUpdate = (key) -> (onlyDynamic == false && get(key) != null) || hasDynamicSetting(key); - final Predicate canRemove = (key) ->( // we can delete if - onlyDynamic && hasDynamicSetting(key) // it's a dynamicSetting and we only do dynamic settings - || get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX) // the setting is not registered AND it's been archived - || (onlyDynamic == false && get(key) != null)); // if it's not dynamic AND we have a key + final Predicate canUpdate = (key) -> ( + isFinalSetting(key) == false && // it's not a final setting + ((onlyDynamic == false && get(key) != null) || isDynamicSetting(key))); + final Predicate canRemove = (key) ->(// we can delete if + isFinalSetting(key) == false && // it's not a final setting + (onlyDynamic && isDynamicSetting(key) // it's a dynamicSetting and we only do dynamic settings + || get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX) // the setting is not registered AND it's been archived + || (onlyDynamic == false && get(key) != null))); // if it's not dynamic AND we have a key for (Map.Entry entry : toApply.getAsMap().entrySet()) { if (entry.getValue() == null && (canRemove.test(entry.getKey()) || entry.getKey().endsWith("*"))) { // this either accepts null values that suffice the canUpdate test OR wildcard expressions (key ends with *) @@ -482,7 +491,11 @@ public abstract class AbstractScopedSettings extends AbstractComponent { updates.put(entry.getKey(), entry.getValue()); changed = true; } else { - 
throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); + if (isFinalSetting(entry.getKey())) { + throw new IllegalArgumentException("final " + type + " setting [" + entry.getKey() + "], not updateable"); + } else { + throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); + } } } changed |= applyDeletes(toRemove, target, canRemove); diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index badd80d5aea..a072b68b277 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -125,6 +125,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING, EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING, IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING, + IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING, IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY, FieldMapper.IGNORE_MALFORMED_SETTING, FieldMapper.COERCE_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 864ceb487bf..633c861d1e2 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -95,6 +95,12 @@ public class Setting extends ToXContentToBytes { */ Dynamic, + /** + * mark this setting as final, not updateable even when the context is not dynamic, + * i.e. setting this property on an index scoped setting will cause updates to fail even when the index is closed + */ + Final, + /** * mark this setting as deprecated */ @@ -135,6 +141,9 @@ public class Setting extends ToXContentToBytes { this.properties = EMPTY_PROPERTIES; } else { this.properties = EnumSet.copyOf(Arrays.asList(properties)); + if (isDynamic() && isFinal()) { + throw new IllegalArgumentException("final setting [" + key + "] cannot be dynamic"); + } } } @@ -218,6 +227,13 @@ public class Setting extends ToXContentToBytes { return properties.contains(Property.Dynamic); } + /** + * Returns true if this setting is final, otherwise false + */ + public final boolean isFinal() { + return properties.contains(Property.Final); + } + /** * Returns the setting properties * @see Property diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/AtomicArray.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/AtomicArray.java index 2278220d9dd..2bf5e50a1c2 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/AtomicArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/AtomicArray.java @@ -40,7 +40,7 @@ public class AtomicArray { } private final AtomicReferenceArray array; - private volatile List> nonNullList; + private volatile List nonNullList; public AtomicArray(int size) { array = new AtomicReferenceArray<>(size); @@ -87,19 +87,18 @@ } /** - * Returns the it as a non null list, with an Entry wrapping each value allowing to - * retain its index. + * Returns it as a non null list.
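
The net effect of Property.Final combined with the validation above: updating a final setting now fails with a dedicated error instead of the old special-cased number-of-shards check. A sketch (index name illustrative; index.number_of_shards is declared final earlier in this change):

    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.settings.Settings;

    class FinalSettingSketch {
        static void tryToResize(Client client) {
            // expected to throw IllegalArgumentException:
            // "final index setting [index.number_of_shards], not updateable"
            client.admin().indices().prepareUpdateSettings("my-index")
                    .setSettings(Settings.builder().put("index.number_of_shards", 10))
                    .get();
        }
    }
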
*/ - public List> asList() { + public List asList() { if (nonNullList == null) { if (array == null || array.length() == 0) { nonNullList = Collections.emptyList(); } else { - List> list = new ArrayList<>(array.length()); + List list = new ArrayList<>(array.length()); for (int i = 0; i < array.length(); i++) { E e = array.get(i); if (e != null) { - list.add(new Entry<>(i, e)); + list.add(e); } } nonNullList = list; @@ -120,23 +119,4 @@ public class AtomicArray { } return a; } - - /** - * An entry within the array. - */ - public static class Entry { - /** - * The original index of the value within the array. - */ - public final int index; - /** - * The value. - */ - public final E value; - - public Entry(int index, E value) { - this.index = index; - this.value = value; - } - } } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 409a70eb649..1ce119636f7 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.store.Store; import java.io.Closeable; import java.io.IOException; @@ -75,6 +74,7 @@ public final class ThreadContext implements Closeable, Writeable { private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(); private final Map defaultHeader; private final ContextThreadLocal threadLocal; + private boolean isSystemContext; /** * Creates a new ThreadContext instance @@ -317,6 +317,21 @@ public final class ThreadContext implements Closeable, Writeable { return threadLocal.get() == DEFAULT_CONTEXT; } + /** + * Marks this thread context as an internal system context. This signals that actions in this context are issued + * by the system itself rather than by a user action. 
+ */ + public void markAsSystemContext() { + threadLocal.set(threadLocal.get().setSystemContext()); + } + + /** + * Returns true iff this context is a system context + */ + public boolean isSystemContext() { + return threadLocal.get().isSystemContext; + } + /** * Returns true if the context is closed, otherwise false */ @@ -338,6 +353,7 @@ private final Map requestHeaders; private final Map transientHeaders; private final Map> responseHeaders; + private final boolean isSystemContext; private ThreadContextStruct(StreamInput in) throws IOException { final int numRequest = in.readVInt(); @@ -349,27 +365,36 @@ this.requestHeaders = requestHeaders; this.responseHeaders = in.readMapOfLists(StreamInput::readString, StreamInput::readString); this.transientHeaders = Collections.emptyMap(); + isSystemContext = false; // we never serialize this; it's a transient flag + } + + private ThreadContextStruct setSystemContext() { + if (isSystemContext) { + return this; + } + return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, true); } private ThreadContextStruct(Map requestHeaders, Map> responseHeaders, - Map transientHeaders) { + Map transientHeaders, boolean isSystemContext) { this.requestHeaders = requestHeaders; this.responseHeaders = responseHeaders; this.transientHeaders = transientHeaders; + this.isSystemContext = isSystemContext; } /** * This represents the default context and it should only ever be called by {@link #DEFAULT_CONTEXT}. */ private ThreadContextStruct() { - this(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + this(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), false); } private ThreadContextStruct putRequest(String key, String value) { Map newRequestHeaders = new HashMap<>(this.requestHeaders); putSingleHeader(key, value, newRequestHeaders); - return new ThreadContextStruct(newRequestHeaders, responseHeaders, transientHeaders); + return new ThreadContextStruct(newRequestHeaders, responseHeaders, transientHeaders, isSystemContext); } private void putSingleHeader(String key, String value, Map newHeaders) { @@ -387,7 +412,7 @@ putSingleHeader(entry.getKey(), entry.getValue(), newHeaders); } newHeaders.putAll(this.requestHeaders); - return new ThreadContextStruct(newHeaders, responseHeaders, transientHeaders); + return new ThreadContextStruct(newHeaders, responseHeaders, transientHeaders, isSystemContext); } } @@ -408,7 +433,7 @@ newResponseHeaders.put(key, entry.getValue()); } } - return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders); + return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders, isSystemContext); } private ThreadContextStruct putResponse(final String key, final String value, final Function uniqueValue) { @@ -432,7 +457,7 @@ newResponseHeaders.put(key, Collections.singletonList(value)); } - return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders); + return new ThreadContextStruct(requestHeaders, newResponseHeaders, transientHeaders, isSystemContext); } private ThreadContextStruct putTransient(String key, Object value) { @@ -440,7 +465,7 @@ public final class ThreadContext implements
Closeable, Writeable { if (newTransient.putIfAbsent(key, value) != null) { throw new IllegalArgumentException("value for key [" + key + "] already present"); } - return new ThreadContextStruct(requestHeaders, responseHeaders, newTransient); + return new ThreadContextStruct(requestHeaders, responseHeaders, newTransient, isSystemContext); } boolean isEmpty() { diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index ea3ae0c919b..2328b5a8616 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -19,6 +19,20 @@ package org.elasticsearch.discovery; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.single.SingleNodeDiscovery; +import org.elasticsearch.discovery.zen.UnicastHostsProvider; +import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.plugins.DiscoveryPlugin; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -28,21 +42,6 @@ import java.util.Optional; import java.util.function.Function; import java.util.function.Supplier; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.zen.UnicastHostsProvider; -import org.elasticsearch.discovery.zen.ZenDiscovery; -import org.elasticsearch.discovery.zen.ZenPing; -import org.elasticsearch.plugins.DiscoveryPlugin; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; - /** * A module for loading classes for node discovery. 
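
A usage sketch for the new system-context flag, assuming the usual stash-and-restore pattern around ThreadContext (the surrounding setup is illustrative):

    import org.elasticsearch.common.util.concurrent.ThreadContext;

    class SystemContextSketch {
        static void runAsSystem(ThreadContext threadContext, Runnable internalAction) {
            // stash the caller's context, mark the fresh one as a system context,
            // and restore the original context when done
            try (ThreadContext.StoredContext ignored = threadContext.stashContext()) {
                threadContext.markAsSystemContext();
                assert threadContext.isSystemContext();
                internalAction.run();
            }
        }
    }
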
*/ @@ -83,6 +82,7 @@ public class DiscoveryModule { discoveryTypes.put("zen", () -> new ZenDiscovery(settings, threadPool, transportService, namedWriteableRegistry, clusterService, hostsProvider)); discoveryTypes.put("none", () -> new NoneDiscovery(settings, clusterService, clusterService.getClusterSettings())); + discoveryTypes.put("single-node", () -> new SingleNodeDiscovery(settings, clusterService)); for (DiscoveryPlugin plugin : plugins) { plugin.getDiscoveryTypes(threadPool, transportService, namedWriteableRegistry, clusterService, hostsProvider).entrySet().forEach(entry -> { @@ -96,10 +96,12 @@ public class DiscoveryModule { if (discoverySupplier == null) { throw new IllegalArgumentException("Unknown discovery type [" + discoveryType + "]"); } + Loggers.getLogger(getClass(), settings).info("using discovery type [{}]", discoveryType); discovery = Objects.requireNonNull(discoverySupplier.get()); } public Discovery getDiscovery() { return discovery; } + } diff --git a/core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java new file mode 100644 index 00000000000..f4735c8bf3a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/discovery/single/SingleNodeDiscovery.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.single; + +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.discovery.DiscoveryStats; +import org.elasticsearch.discovery.zen.PendingClusterStateStats; +import org.elasticsearch.discovery.zen.PendingClusterStatesQueue; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A discovery implementation where the only member of the cluster is the local node. 
+ */ +public class SingleNodeDiscovery extends AbstractLifecycleComponent implements Discovery { + + private final ClusterService clusterService; + private final DiscoverySettings discoverySettings; + + public SingleNodeDiscovery(final Settings settings, final ClusterService clusterService) { + super(Objects.requireNonNull(settings)); + this.clusterService = Objects.requireNonNull(clusterService); + final ClusterSettings clusterSettings = + Objects.requireNonNull(clusterService.getClusterSettings()); + this.discoverySettings = new DiscoverySettings(settings, clusterSettings); + } + + @Override + public DiscoveryNode localNode() { + return clusterService.localNode(); + } + + @Override + public String nodeDescription() { + return clusterService.getClusterName().value() + "/" + clusterService.localNode().getId(); + } + + @Override + public void setAllocationService(final AllocationService allocationService) { + + } + + @Override + public void publish(final ClusterChangedEvent event, final AckListener listener) { + + } + + @Override + public DiscoveryStats stats() { + return new DiscoveryStats((PendingClusterStateStats) null); + } + + @Override + public DiscoverySettings getDiscoverySettings() { + return discoverySettings; + } + + @Override + public void startInitialJoin() { + final ClusterStateTaskExecutor executor = + new ClusterStateTaskExecutor() { + + @Override + public ClusterTasksResult execute( + final ClusterState current, + final List tasks) throws Exception { + assert tasks.size() == 1; + final DiscoveryNodes.Builder nodes = + DiscoveryNodes.builder(current.nodes()); + // always set the local node as master, there will not be other nodes + nodes.masterNodeId(localNode().getId()); + final ClusterState next = + ClusterState.builder(current).nodes(nodes).build(); + final ClusterTasksResult.Builder result = + ClusterTasksResult.builder(); + return result.successes(tasks).build(next); + } + + @Override + public boolean runOnlyOnMaster() { + return false; + } + + }; + final ClusterStateTaskConfig config = ClusterStateTaskConfig.build(Priority.URGENT); + clusterService.submitStateUpdateTasks( + "single-node-start-initial-join", + Collections.singletonMap(localNode(), (s, e) -> {}), config, executor); + } + + @Override + public int getMinimumMasterNodes() { + return 1; + } + + @Override + protected void doStart() { + + } + + @Override + protected void doStop() { + + } + + @Override + protected void doClose() throws IOException { + + } + +} diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java index 6f56a547d3f..442491e6b13 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java @@ -178,20 +178,28 @@ public class MembershipAction extends AbstractComponent { @Override public void messageReceived(ValidateJoinRequest request, TransportChannel channel) throws Exception { - ensureIndexCompatibility(Version.CURRENT.minimumIndexCompatibilityVersion(), request.state.getMetaData()); + ensureIndexCompatibility(Version.CURRENT, request.state.getMetaData()); // for now, the mere fact that we can serialize the cluster state acts as validation.... channel.sendResponse(TransportResponse.Empty.INSTANCE); } } /** - * Ensures that all indices are compatible with the supported index version. + * Ensures that all indices are compatible with the given node version. 
This ensures that no index in the given metadata + * was created with a newer version of Elasticsearch, and that all indices are at or above the minimum index + * compatibility version. + * @see Version#minimumIndexCompatibilityVersion() * @throws IllegalStateException if any index is incompatible with the given version */ - static void ensureIndexCompatibility(final Version supportedIndexVersion, MetaData metaData) { + static void ensureIndexCompatibility(final Version nodeVersion, MetaData metaData) { + Version supportedIndexVersion = nodeVersion.minimumIndexCompatibilityVersion(); // we ensure that all indices in the cluster we join are compatible with us no matter if they are // closed or not we can't read mappings of these indices so we need to reject the join... for (IndexMetaData idxMetaData : metaData) { + if (idxMetaData.getCreationVersion().after(nodeVersion)) { + throw new IllegalStateException("index " + idxMetaData.getIndex() + " version not supported: " + + idxMetaData.getCreationVersion() + " the node version is: " + nodeVersion); + } if (idxMetaData.getCreationVersion().before(supportedIndexVersion)) { throw new IllegalStateException("index " + idxMetaData.getIndex() + " version not supported: " + idxMetaData.getCreationVersion() + " minimum compatible index version is: " + supportedIndexVersion); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index 2d84f5f863d..354425a3dca 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -453,7 +453,7 @@ public class NodeJoinController extends AbstractComponent { } // we do this validation quite late to prevent race conditions between nodes joining and importing dangling indices // we have to reject nodes that don't support all indices we have in this cluster - MembershipAction.ensureIndexCompatibility(minNodeVersion.minimumIndexCompatibilityVersion(), currentState.getMetaData()); + MembershipAction.ensureIndexCompatibility(minNodeVersion, currentState.getMetaData()); if (nodesChanged) { newState.nodes(nodesBuilder); return results.build(allocationService.reroute(newState.build(), "node_join")); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index be6f52fc22c..7b24536346c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -839,7 +839,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover } else { // we do this in a couple of places including the cluster update thread. This one here is really just best effort // to ensure we fail as fast as possible. - MembershipAction.ensureIndexCompatibility(node.getVersion().minimumIndexCompatibilityVersion(), state.getMetaData()); + MembershipAction.ensureIndexCompatibility(node.getVersion(), state.getMetaData()); // try and connect to the node, if it fails, we can raise an exception back to the client...
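
The rule the reworked ensureIndexCompatibility enforces, sketched with illustrative version values (Version.after, Version.before, and minimumIndexCompatibilityVersion are the methods used above):

    import org.elasticsearch.Version;

    class IndexCompatSketch {
        static boolean joinable(Version nodeVersion, Version indexCreated) {
            Version oldestSupported = nodeVersion.minimumIndexCompatibilityVersion();
            // reject indices created on a newer node ("from the future") as well as
            // indices older than the oldest version this node can still read
            return indexCreated.after(nodeVersion) == false
                    && indexCreated.before(oldestSupported) == false;
        }
    }
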
transportService.connectToNode(node); diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 47c7ffb71bc..4ae16255d5e 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -22,7 +22,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.MergePolicy; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -112,6 +111,25 @@ public final class IndexSettings { Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), Property.Dynamic, Property.IndexScope); + /** + * The maximum size of a translog generation. This is independent of the maximum size of + * translog operations that have not been flushed. + */ + public static final Setting INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING = + Setting.byteSizeSetting( + "index.translog.generation_threshold_size", + new ByteSizeValue(64, ByteSizeUnit.MB), + /* + * An empty translog occupies 43 bytes on disk. If the generation threshold is + * below this, the flush thread can get stuck in an infinite loop repeatedly + * rolling the generation as every new generation will already exceed the + * generation threshold. However, small thresholds are useful for testing so we + * do not add a large lower bound here. + */ + new ByteSizeValue(64, ByteSizeUnit.BYTES), + new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES), + new Property[]{Property.Dynamic, Property.IndexScope}); + public static final Setting INDEX_SEQ_NO_CHECKPOINT_SYNC_INTERVAL = Setting.timeSetting("index.seq_no.checkpoint_sync_interval", new TimeValue(30, TimeUnit.SECONDS), new TimeValue(-1, TimeUnit.MILLISECONDS), Property.Dynamic, Property.IndexScope); @@ -156,6 +174,7 @@ public final class IndexSettings { private volatile TimeValue refreshInterval; private final TimeValue globalCheckpointInterval; private volatile ByteSizeValue flushThresholdSize; + private volatile ByteSizeValue generationThresholdSize; private final MergeSchedulerConfig mergeSchedulerConfig; private final MergePolicyConfig mergePolicyConfig; private final IndexScopedSettings scopedSettings; @@ -250,6 +269,7 @@ public final class IndexSettings { refreshInterval = scopedSettings.get(INDEX_REFRESH_INTERVAL_SETTING); globalCheckpointInterval = scopedSettings.get(INDEX_SEQ_NO_CHECKPOINT_SYNC_INTERVAL); flushThresholdSize = scopedSettings.get(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING); + generationThresholdSize = scopedSettings.get(INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING); mergeSchedulerConfig = new MergeSchedulerConfig(this); gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis(); warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING); @@ -281,6 +301,9 @@ public final class IndexSettings { scopedSettings.addSettingsUpdateConsumer(INDEX_WARMER_ENABLED_SETTING, this::setEnableWarmer); scopedSettings.addSettingsUpdateConsumer(INDEX_GC_DELETES_SETTING, this::setGCDeletes); scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING, this::setTranslogFlushThresholdSize); + scopedSettings.addSettingsUpdateConsumer( + 
INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING, + this::setGenerationThresholdSize); scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval); scopedSettings.addSettingsUpdateConsumer(MAX_REFRESH_LISTENERS_PER_SHARD, this::setMaxRefreshListeners); scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_SCROLL, this::setMaxSlicesPerScroll); @@ -290,6 +313,10 @@ public final class IndexSettings { this.flushThresholdSize = byteSizeValue; } + private void setGenerationThresholdSize(final ByteSizeValue generationThresholdSize) { + this.generationThresholdSize = generationThresholdSize; + } + private void setGCDeletes(TimeValue timeValue) { this.gcDeletesInMillis = timeValue.getMillis(); } @@ -461,6 +488,19 @@ public final class IndexSettings { */ public ByteSizeValue getFlushThresholdSize() { return flushThresholdSize; } + /** + * Returns the generation threshold size. As sequence numbers can cause multiple generations to + * be preserved for rollback purposes, we want to keep the size of individual generations from + * growing too large to avoid excessive disk space consumption. Therefore, the translog is + * automatically rolled to a new generation when the current generation exceeds this generation + * threshold size. + * + * @return the generation threshold size + */ + public ByteSizeValue getGenerationThresholdSize() { + return generationThresholdSize; + } + /** * Returns the {@link MergeSchedulerConfig} */ diff --git a/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java index 9cb01e75e55..a4cd4c41c97 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/KeywordMarkerTokenFilterFactory.java @@ -21,32 +21,70 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.miscellaneous.PatternKeywordMarkerFilter; import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import java.util.Set; +import java.util.regex.Pattern; +/** + * A factory for creating keyword marker token filters that prevent tokens from + * being modified by stemmers. Two types of keyword marker filters are available: + * the {@link SetKeywordMarkerFilter} and the {@link PatternKeywordMarkerFilter}. + * + * The {@link SetKeywordMarkerFilter} uses a set of keywords to denote which tokens + * should be excluded from stemming. This filter is created if the settings include + * {@code keywords}, which contains the list of keywords, or {@code keywords_path}, + * which contains a path to a file in the config directory with the keywords. + * + * The {@link PatternKeywordMarkerFilter} uses a regular expression pattern to match + * against tokens that should be excluded from stemming. This filter is created if + * the settings include {@code keywords_pattern}, which contains the regular expression + * to match against.
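
Because the new generation threshold is dynamic, it can be adjusted per index at runtime; a hedged sketch (index name and size are illustrative):

    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.settings.Settings;

    class TranslogGenerationSketch {
        static void capGenerationSize(Client client) {
            // roll to a new translog generation once the current one exceeds 32mb
            client.admin().indices().prepareUpdateSettings("my-index")
                    .setSettings(Settings.builder()
                            .put("index.translog.generation_threshold_size", "32mb"))
                    .get();
        }
    }
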
+ */ public class KeywordMarkerTokenFilterFactory extends AbstractTokenFilterFactory { private final CharArraySet keywordLookup; + private final Pattern keywordPattern; public KeywordMarkerTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); boolean ignoreCase = settings.getAsBooleanLenientForPreEs6Indices(indexSettings.getIndexVersionCreated(), "ignore_case", false, deprecationLogger); - Set rules = Analysis.getWordSet(env, indexSettings.getIndexVersionCreated(), settings, "keywords"); - if (rules == null) { - throw new IllegalArgumentException("keyword filter requires either `keywords` or `keywords_path` to be configured"); + String patternString = settings.get("keywords_pattern"); + if (patternString != null) { + // a pattern for matching keywords is specified, as opposed to a + // set of keyword strings to match against + if (settings.get("keywords") != null || settings.get("keywords_path") != null) { + throw new IllegalArgumentException( + "cannot specify both `keywords_pattern` and `keywords` or `keywords_path`"); + } + keywordPattern = Pattern.compile(patternString); + keywordLookup = null; + } else { + Set rules = Analysis.getWordSet(env, indexSettings.getIndexVersionCreated(), settings, "keywords"); + if (rules == null) { + throw new IllegalArgumentException( + "keyword filter requires either `keywords`, `keywords_path`, " + + "or `keywords_pattern` to be configured"); + } + // a set of keywords (or a path to them) is specified + keywordLookup = new CharArraySet(rules, ignoreCase); + keywordPattern = null; } - keywordLookup = new CharArraySet(rules, ignoreCase); } @Override public TokenStream create(TokenStream tokenStream) { - return new SetKeywordMarkerFilter(tokenStream, keywordLookup); + if (keywordPattern != null) { + return new PatternKeywordMarkerFilter(tokenStream, keywordPattern); + } else { + return new SetKeywordMarkerFilter(tokenStream, keywordLookup); + } } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java index 11cc838660f..f4876149cac 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java +++ b/core/src/main/java/org/elasticsearch/index/engine/ElasticsearchConcurrentMergeScheduler.java @@ -110,10 +110,15 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler { totalMergesNumDocs.inc(totalNumDocs); totalMergesSizeInBytes.inc(totalSizeInBytes); totalMerges.inc(tookMS); - - long stoppedMS = TimeValue.nsecToMSec(merge.rateLimiter.getTotalStoppedNS()); - long throttledMS = TimeValue.nsecToMSec(merge.rateLimiter.getTotalPausedNS()); - + long stoppedMS = TimeValue.nsecToMSec( + merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.STOPPED) + ); + long throttledMS = TimeValue.nsecToMSec( + merge.getMergeProgress().getPauseTimes().get(MergePolicy.OneMergeProgress.PauseReason.PAUSED) + ); + final Thread thread = Thread.currentThread(); + long totalBytesWritten = OneMergeHelper.getTotalBytesWritten(thread, merge); + double mbPerSec = OneMergeHelper.getMbPerSec(thread, merge); totalMergeStoppedTime.inc(stoppedMS); totalMergeThrottledTime.inc(throttledMS); @@ -125,8 +130,8 @@ class ElasticsearchConcurrentMergeScheduler extends ConcurrentMergeScheduler { totalNumDocs, TimeValue.timeValueMillis(stoppedMS), 
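
Configuration-wise, the new branch is selected by keywords_pattern; a sketch of analysis settings that would exercise it (all names are illustrative):

    import org.elasticsearch.common.settings.Settings;

    class KeywordMarkerPatternSketch {
        static Settings analysisSettings() {
            // tokens matching the pattern are marked as keywords and skipped by stemmers
            return Settings.builder()
                    .put("index.analysis.filter.my_keywords.type", "keyword_marker")
                    .put("index.analysis.filter.my_keywords.keywords_pattern", ".*ism$")
                    .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
                    .putArray("index.analysis.analyzer.my_analyzer.filter",
                            "lowercase", "my_keywords", "porter_stem")
                    .build();
        }
    }
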
TimeValue.timeValueMillis(throttledMS), - merge.rateLimiter.getTotalBytesWritten()/1024f/1024f, - merge.rateLimiter.getMBPerSec()); + totalBytesWritten/1024f/1024f, + mbPerSec); if (tookMS > 20000) { // if more than 20 seconds, DEBUG log it logger.debug("{}", message); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index 97f10932e0f..0b63dfb8df8 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -85,9 +85,9 @@ public interface IndexFieldData extends IndexCompone FD loadDirect(LeafReaderContext context) throws Exception; /** - * Comparator used for sorting. + * Returns the {@link SortField} to be used for sorting. */ - XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested); + SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse); /** * Clears any resources associated with this field data. @@ -136,17 +136,17 @@ public interface IndexFieldData extends IndexCompone } /** Whether missing values should be sorted first. */ - protected final boolean sortMissingFirst(Object missingValue) { + public final boolean sortMissingFirst(Object missingValue) { return "_first".equals(missingValue); } /** Whether missing values should be sorted last, this is the default. */ - protected final boolean sortMissingLast(Object missingValue) { + public final boolean sortMissingLast(Object missingValue) { return missingValue == null || "_last".equals(missingValue); } /** Return the missing object value according to the reduced type of the comparator.
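Reviewer sketch of the new entry point: callers now obtain a ready-made SortField instead of assembling one from a comparator source. This assumes an IndexFieldData instance is at hand; a null missing value falls back to the `_last` behavior documented above.

    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.elasticsearch.index.fielddata.IndexFieldData;
    import org.elasticsearch.search.MultiValueMode;

    class SortFieldExample {
        // Ascending sort on a field; nested == null means no nested handling.
        static Sort ascending(IndexFieldData<?> fieldData) {
            SortField sortField = fieldData.sortField(null, MultiValueMode.MIN, null, false);
            return new Sort(sortField);
        }
    }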
*/ - protected final Object missingObject(Object missingValue, boolean reversed) { + public final Object missingObject(Object missingValue, boolean reversed) { if (sortMissingFirst(missingValue) || sortMissingLast(missingValue)) { final boolean min = sortMissingFirst(missingValue) ^ reversed; switch (reducedType()) { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java index 3e756204002..2055208021e 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.fielddata.ordinals; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.SortField; import org.apache.lucene.util.Accountable; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.AbstractIndexComponent; @@ -68,7 +69,7 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen } @Override - public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested) { + public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { throw new UnsupportedOperationException("no global ordinals sorting yet"); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java index b35706961ba..8db38e59ce0 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.SortField; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -43,7 +44,7 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie } @Override - public final XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested) { + public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance"); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index c5858afaf22..bdf1bbac332 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.fielddata.plain; +import org.apache.lucene.search.SortField; import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; @@ -28,6 +29,7 @@ import 
org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; @@ -104,7 +106,7 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData) this, missingValue, sortMode, nested); + public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { + XFieldComparatorSource source = new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested); + /** + * Check if we can use a simple {@link SortedSetSortField} compatible with index sorting and + * returns a custom sort field otherwise. + */ + if (nested != null || + (sortMode != MultiValueMode.MAX && sortMode != MultiValueMode.MIN) || + (source.sortMissingLast(missingValue) == false && source.sortMissingFirst(missingValue) == false)) { + return new SortField(getFieldName(), source, reverse); + } + SortField sortField = new SortedSetSortField(fieldName, reverse, + sortMode == MultiValueMode.MAX ? SortedSetSelector.Type.MAX : SortedSetSelector.Type.MIN); + sortField.setMissingValue(source.sortMissingLast(missingValue) ^ reverse ? + SortedSetSortField.STRING_LAST : SortedSetSortField.STRING_FIRST); + return sortField; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMappers.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMappers.java deleted file mode 100644 index 6e1732c94ea..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMappers.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; - -/** - * A holder for several {@link FieldMapper}. 
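Back on the sortField implementation earlier in this hunk: when the sort is a plain MIN/MAX over ordinals with default missing-value handling, it emits a native Lucene sort that index sorting can recognize. A sketch of that fast path (field name illustrative):

    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.SortedSetSelector;
    import org.apache.lucene.search.SortedSetSortField;

    class SortedSetFastPathExample {
        // Equivalent of the fast path: ascending MIN-mode sort, missing values last.
        static SortField minAscendingMissingLast(String field) {
            SortField sortField = new SortedSetSortField(field, false, SortedSetSelector.Type.MIN);
            sortField.setMissingValue(SortedSetSortField.STRING_LAST);
            return sortField;
        }
    }

Nested sorts, sort modes other than MIN/MAX, and explicit custom missing values still go through the BytesRefFieldComparatorSource branch.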
- */ -public class FieldMappers implements Iterable { - - private final FieldMapper[] fieldMappers; - private final List fieldMappersAsList; - - public FieldMappers() { - this.fieldMappers = new FieldMapper[0]; - this.fieldMappersAsList = Arrays.asList(fieldMappers); - } - - public FieldMappers(FieldMapper fieldMapper) { - this.fieldMappers = new FieldMapper[]{fieldMapper}; - this.fieldMappersAsList = Arrays.asList(this.fieldMappers); - } - - private FieldMappers(FieldMapper[] fieldMappers) { - this.fieldMappers = fieldMappers; - this.fieldMappersAsList = Arrays.asList(this.fieldMappers); - } - - public FieldMapper mapper() { - if (fieldMappers.length == 0) { - return null; - } - return fieldMappers[0]; - } - - public boolean isEmpty() { - return fieldMappers.length == 0; - } - - public List mappers() { - return this.fieldMappersAsList; - } - - @Override - public Iterator iterator() { - return fieldMappersAsList.iterator(); - } - - /** - * Concats and returns a new {@link FieldMappers}. - */ - public FieldMappers concat(FieldMapper mapper) { - FieldMapper[] newMappers = new FieldMapper[fieldMappers.length + 1]; - System.arraycopy(fieldMappers, 0, newMappers, 0, fieldMappers.length); - newMappers[fieldMappers.length] = mapper; - return new FieldMappers(newMappers); - } - - public FieldMappers remove(FieldMapper mapper) { - ArrayList list = new ArrayList<>(fieldMappers.length); - for (FieldMapper fieldMapper : fieldMappers) { - if (!fieldMapper.equals(mapper)) { // identify equality - list.add(fieldMapper); - } - } - return new FieldMappers(list.toArray(new FieldMapper[list.size()])); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 93f22d42a80..55c2e4cb3c6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -313,14 +313,14 @@ public abstract class MappedFieldType extends FieldType { /** Returns true if the field is searchable. * */ - protected boolean isSearchable() { + public boolean isSearchable() { return indexOptions() != IndexOptions.NONE; } /** Returns true if the field is aggregatable. 
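The visibility widening here (for isSearchable above and isAggregatable just below) is what lets the new field capabilities endpoint, added at the end of this diff, interrogate mappings from outside the package. A toy sketch of a caller (method name illustrative):

    import org.elasticsearch.index.mapper.MappedFieldType;

    class FieldCapsExample {
        // Summarize what a field supports now that the accessors are public.
        static String describe(MappedFieldType fieldType) {
            return fieldType.name()
                + ": searchable=" + fieldType.isSearchable()
                + ", aggregatable=" + fieldType.isAggregatable();
        }
    }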
* */ - protected boolean isAggregatable() { + public boolean isAggregatable() { try { fielddataBuilder(); return true; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index f270ae74ca9..9a271916ac1 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -19,10 +19,10 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; -import org.apache.lucene.document.DoubleRangeField; -import org.apache.lucene.document.FloatRangeField; -import org.apache.lucene.document.IntRangeField; -import org.apache.lucene.document.LongRangeField; +import org.apache.lucene.document.DoubleRange; +import org.apache.lucene.document.FloatRange; +import org.apache.lucene.document.IntRange; +import org.apache.lucene.document.LongRange; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -430,7 +430,7 @@ public class RangeFieldMapper extends FieldMapper { DATE("date_range", NumberType.LONG) { @Override public Field getRangeField(String name, Range r) { - return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()}); + return new LongRange(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()}); } private Number parse(DateMathParser dateMathParser, String dateStr) { return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");}); @@ -516,7 +516,7 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Field getRangeField(String name, Range r) { - return new FloatRangeField(name, new float[] {r.from.floatValue()}, new float[] {r.to.floatValue()}); + return new FloatRange(name, new float[] {r.from.floatValue()}, new float[] {r.to.floatValue()}); } @Override public byte[] getBytes(Range r) { @@ -527,19 +527,19 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return FloatRangeField.newWithinQuery(field, + return FloatRange.newWithinQuery(field, new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)}, new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)}); } @Override public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return FloatRangeField.newContainsQuery(field, + return FloatRange.newContainsQuery(field, new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)}, new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)}); } @Override public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return FloatRangeField.newIntersectsQuery(field, + return FloatRange.newIntersectsQuery(field, new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)}, new float[] {includeTo ? 
(Float)to : Math.nextDown((Float)to)}); } @@ -563,7 +563,7 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Field getRangeField(String name, Range r) { - return new DoubleRangeField(name, new double[] {r.from.doubleValue()}, new double[] {r.to.doubleValue()}); + return new DoubleRange(name, new double[] {r.from.doubleValue()}, new double[] {r.to.doubleValue()}); } @Override public byte[] getBytes(Range r) { @@ -574,19 +574,19 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return DoubleRangeField.newWithinQuery(field, + return DoubleRange.newWithinQuery(field, new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)}, new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)}); } @Override public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return DoubleRangeField.newContainsQuery(field, + return DoubleRange.newContainsQuery(field, new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)}, new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)}); } @Override public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return DoubleRangeField.newIntersectsQuery(field, + return DoubleRange.newIntersectsQuery(field, new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)}, new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)}); } @@ -612,7 +612,7 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Field getRangeField(String name, Range r) { - return new IntRangeField(name, new int[] {r.from.intValue()}, new int[] {r.to.intValue()}); + return new IntRange(name, new int[] {r.from.intValue()}, new int[] {r.to.intValue()}); } @Override public byte[] getBytes(Range r) { @@ -623,17 +623,17 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return IntRangeField.newWithinQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, + return IntRange.newWithinQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, new int[] {(Integer)to - (includeTo ? 0 : 1)}); } @Override public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return IntRangeField.newContainsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, + return IntRange.newContainsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, new int[] {(Integer)to - (includeTo ? 0 : 1)}); } @Override public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return IntRangeField.newIntersectsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, + return IntRange.newIntersectsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, new int[] {(Integer)to - (includeTo ? 
0 : 1)}); } }, @@ -656,7 +656,7 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Field getRangeField(String name, Range r) { - return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()}); + return new LongRange(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()}); } @Override public byte[] getBytes(Range r) { @@ -669,17 +669,17 @@ public class RangeFieldMapper extends FieldMapper { } @Override public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return LongRangeField.newWithinQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, + return LongRange.newWithinQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, new long[] {(Long)to - (includeTo ? 0 : 1)}); } @Override public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return LongRangeField.newContainsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, + return LongRange.newContainsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, new long[] {(Long)to - (includeTo ? 0 : 1)}); } @Override public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { - return LongRangeField.newIntersectsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, + return LongRange.newIntersectsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, new long[] {(Long)to - (includeTo ? 0 : 1)}); } }; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 62ff8bdede0..226ab905a27 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -28,8 +28,10 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SortField; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -492,9 +494,9 @@ public class ScaledFloatFieldMapper extends FieldMapper { } @Override - public org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource comparatorSource(Object missingValue, - MultiValueMode sortMode, Nested nested) { - return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested); + public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { + final XFieldComparatorSource source = new DoubleValuesComparatorSource(this, missingValue, sortMode, nested); + return new SortField(getFieldName(), source, reverse); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index b41695eb8bd..6b2298ba62c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -287,7 +287,7 @@ public class TextFieldMapper extends FieldMapper { if (fielddata == false) { throw new 
IllegalArgumentException("Fielddata is disabled on text fields by default. Set fielddata=true on [" + name() + "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however " - + "use significant memory."); + + "use significant memory. Alternatively use a keyword field instead."); } return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize); } diff --git a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 5da9edcd2a5..c6f153f319c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -275,7 +275,7 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder i while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else { + } else if (token.isValue()) { if (TERM_FIELD.match(currentFieldName)) { value = parser.objectBytes(); } else if (VALUE_FIELD.match(currentFieldName)) { @@ -298,6 +298,9 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder i throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query does not support [" + currentFieldName + "]"); } + } else { + throw new ParsingException(parser.getTokenLocation(), + "[" + NAME + "] unexpected token [" + token + "] after [" + currentFieldName + "]"); } } } else { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 0e6054deccd..32d3d4d4bf8 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -771,27 +771,44 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return engine.syncFlush(syncId, expectedCommitId); } - public Engine.CommitId flush(FlushRequest request) throws ElasticsearchException { - boolean waitIfOngoing = request.waitIfOngoing(); - boolean force = request.force(); - if (logger.isTraceEnabled()) { - logger.trace("flush with {}", request); - } - // we allows flush while recovering, since we allow for operations to happen - // while recovering, and we want to keep the translog at bay (up to deletes, which - // we don't gc). Yet, we don't use flush internally to clear deletes and flush the indexwriter since - // we use #writeIndexingBuffer for this now. + /** + * Executes the given flush request against the engine. + * + * @param request the flush request + * @return the commit ID + */ + public Engine.CommitId flush(FlushRequest request) { + final boolean waitIfOngoing = request.waitIfOngoing(); + final boolean force = request.force(); + logger.trace("flush with {}", request); + /* + * We allow flushes while recovery since we allow operations to happen while recovering and + * we want to keep the translog under control (up to deletes, which we do not GC). Yet, we + * do not use flush internally to clear deletes and flush the index writer since we use + * Engine#writeIndexingBuffer for this now. 
+ */ verifyNotClosed(); - Engine engine = getEngine(); + final Engine engine = getEngine(); if (engine.isRecovering()) { - throw new IllegalIndexShardStateException(shardId(), state, "flush is only allowed if the engine is not recovery" + - " from translog"); + throw new IllegalIndexShardStateException( + shardId(), + state, + "flush is only allowed if the engine is not recovering from translog"); } - long time = System.nanoTime(); - Engine.CommitId commitId = engine.flush(force, waitIfOngoing); + final long time = System.nanoTime(); + final Engine.CommitId commitId = engine.flush(force, waitIfOngoing); flushMetric.inc(System.nanoTime() - time); return commitId; + } + /** + * Rolls the translog generation. + * + * @throws IOException if any file operations on the translog throw an I/O exception + */ + private void rollTranslogGeneration() throws IOException { + final Engine engine = getEngine(); + engine.getTranslog().rollGeneration(); } public void forceMerge(ForceMergeRequest forceMerge) throws IOException { @@ -1256,17 +1273,39 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } /** - * Returns true iff this shard needs to be flushed due to too many translog operation or a too large transaction log. - * Otherwise false. + * Tests whether or not the translog should be flushed. This test is based on the current size + * of the translog compared to the configured flush threshold size. + * + * @return {@code true} if the translog should be flushed */ boolean shouldFlush() { - Engine engine = getEngineOrNull(); + final Engine engine = getEngineOrNull(); if (engine != null) { try { - Translog translog = engine.getTranslog(); - return translog.sizeInBytes() > indexSettings.getFlushThresholdSize().getBytes(); - } catch (AlreadyClosedException ex) { - // that's fine we are already close - no need to flush + final Translog translog = engine.getTranslog(); + return translog.shouldFlush(); + } catch (final AlreadyClosedException e) { + // we are already closed, no need to flush or roll + } + } + return false; + } + + /** + * Tests whether or not the translog generation should be rolled to a new generation. This test + * is based on the size of the current generation compared to the configured generation + * threshold size. + * + * @return {@code true} if the current generation should be rolled to a new generation + */ + boolean shouldRollTranslogGeneration() { + final Engine engine = getEngineOrNull(); + if (engine != null) { + try { + final Translog translog = engine.getTranslog(); + return translog.shouldRollGeneration(); + } catch (final AlreadyClosedException e) { + // we are already closed, no need to flush or roll } } return false; @@ -1810,28 +1849,31 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return indexSettings.getTranslogDurability(); } - private final AtomicBoolean asyncFlushRunning = new AtomicBoolean(); + // we can not protect with a lock since we "release" on a different thread + private final AtomicBoolean flushOrRollRunning = new AtomicBoolean(); /** - * Schedules a flush if needed but won't schedule more than one flush concurrently. The flush will be executed on the - * Flush thread-pool asynchronously. - * - * @return true if a new flush is scheduled otherwise false. + * Schedules a flush or translog generation roll if needed but will not schedule more than one + * concurrently. The operation will be executed asynchronously on the flush thread pool.
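The javadoc above describes a check/flag/re-check dance; because the flag is released on a different thread, a lock cannot be used. A generic sketch of the idiom (all names illustrative, not the shard's actual members):

    import java.util.concurrent.Executor;
    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.function.BooleanSupplier;

    final class SingleFlightScheduler {
        private final AtomicBoolean running = new AtomicBoolean();
        private final Executor executor;
        private final BooleanSupplier shouldRun;
        private final Runnable work;

        SingleFlightScheduler(Executor executor, BooleanSupplier shouldRun, Runnable work) {
            this.executor = executor;
            this.shouldRun = shouldRun;
            this.work = work;
        }

        void maybeSchedule() {
            if (shouldRun.getAsBoolean() && running.compareAndSet(false, true)) {
                // re-check: another thread may have finished the work between our first
                // check and winning the flag, leaving nothing to do
                if (shouldRun.getAsBoolean()) {
                    executor.execute(() -> {
                        try {
                            work.run();
                        } finally {
                            running.set(false);
                            maybeSchedule(); // pick up work that accumulated while running
                        }
                    });
                } else {
                    running.set(false);
                }
            }
        }
    }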
*/ - public boolean maybeFlush() { - if (shouldFlush()) { - if (asyncFlushRunning.compareAndSet(false, true)) { // we can't use a lock here since we "release" in a different thread - if (shouldFlush() == false) { - // we have to check again since otherwise there is a race when a thread passes - // the first shouldFlush() check next to another thread which flushes fast enough - // to finish before the current thread could flip the asyncFlushRunning flag. - // in that situation we have an extra unexpected flush. - asyncFlushRunning.compareAndSet(true, false); - } else { + public void afterWriteOperation() { + if (shouldFlush() || shouldRollTranslogGeneration()) { + if (flushOrRollRunning.compareAndSet(false, true)) { + /* + * We have to check again since otherwise there is a race when a thread passes the + * first check next to another thread which performs the operation quickly enough to + * finish before the current thread could flip the flag. In that situation, we have + * an extra operation. + * + * Additionally, a flush implicitly executes a translog generation roll so if we + * execute a flush then we do not need to check if we should roll the translog + * generation. + */ + if (shouldFlush()) { logger.debug("submitting async flush request"); - final AbstractRunnable abstractRunnable = new AbstractRunnable() { + final AbstractRunnable flush = new AbstractRunnable() { @Override - public void onFailure(Exception e) { + public void onFailure(final Exception e) { if (state != IndexShardState.CLOSED) { logger.warn("failed to flush index", e); } @@ -1844,16 +1886,38 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl @Override public void onAfter() { - asyncFlushRunning.compareAndSet(true, false); - maybeFlush(); // fire a flush up again if we have filled up the limits such that shouldFlush() returns true + flushOrRollRunning.compareAndSet(true, false); + afterWriteOperation(); } }; - threadPool.executor(ThreadPool.Names.FLUSH).execute(abstractRunnable); - return true; + threadPool.executor(ThreadPool.Names.FLUSH).execute(flush); + } else if (shouldRollTranslogGeneration()) { + logger.debug("submitting async roll translog generation request"); + final AbstractRunnable roll = new AbstractRunnable() { + @Override + public void onFailure(final Exception e) { + if (state != IndexShardState.CLOSED) { + logger.warn("failed to roll translog generation", e); + } + } + + @Override + protected void doRun() throws Exception { + rollTranslogGeneration(); + } + + @Override + public void onAfter() { + flushOrRollRunning.compareAndSet(true, false); + afterWriteOperation(); + } + }; + threadPool.executor(ThreadPool.Names.FLUSH).execute(roll); + } else { + flushOrRollRunning.compareAndSet(true, false); } } } - return false; } /** diff --git a/core/src/main/java/org/elasticsearch/index/similarity/BooleanSimilarityProvider.java b/core/src/main/java/org/elasticsearch/index/similarity/BooleanSimilarityProvider.java new file mode 100644 index 00000000000..e5db045f371 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/similarity/BooleanSimilarityProvider.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.similarity; + +import org.apache.lucene.search.similarities.BooleanSimilarity; +import org.elasticsearch.common.settings.Settings; + +/** + * {@link SimilarityProvider} for the {@link BooleanSimilarity}, + * which is a simple similarity that gives terms a score equal + * to their query boost only. This is useful in situations where + * a field does not need to be scored by a full-text ranking + * algorithm, but rather all that matters is whether the query + * terms matched or not. + */ +public class BooleanSimilarityProvider extends AbstractSimilarityProvider { + + private final BooleanSimilarity similarity = new BooleanSimilarity(); + + public BooleanSimilarityProvider(String name, Settings settings, Settings indexSettings) { + super(name); + } + + /** + * {@inheritDoc} + */ + @Override + public BooleanSimilarity get() { + return similarity; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 54aa940a71f..e8203af8523 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -47,6 +47,7 @@ public final class SimilarityService extends AbstractIndexComponent { Map> buildIn = new HashMap<>(); defaults.put("classic", ClassicSimilarityProvider::new); defaults.put("BM25", BM25SimilarityProvider::new); + defaults.put("boolean", BooleanSimilarityProvider::new); buildIn.put("classic", ClassicSimilarityProvider::new); buildIn.put("BM25", BM25SimilarityProvider::new); buildIn.put("DFR", DFRSimilarityProvider::new); diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index ee4d0a4391a..d9a8cc408f8 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.util.BigArrays; @@ -55,6 +56,7 @@ import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; @@ -329,7 +331,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * Returns the generation of the current transaction log. 
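Circling back to the similarity registration above: BooleanSimilarityProvider takes the standard provider constructor arguments but ignores the settings, handing out one shared BooleanSimilarity. A construction sketch grounded in the constructor shown above:

    import org.apache.lucene.search.similarities.BooleanSimilarity;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.index.similarity.BooleanSimilarityProvider;

    class BooleanSimilarityExample {
        // The provider is effectively stateless: every get() returns the same instance.
        static BooleanSimilarity similarity() {
            return new BooleanSimilarityProvider("boolean", Settings.EMPTY, Settings.EMPTY).get();
        }
    }

With the default registered, a mapping can declare "similarity": "boolean" on fields where only match/no-match should influence scoring.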
*/ public long currentFileGeneration() { - try (ReleasableLock lock = readLock.acquire()) { + try (ReleasableLock ignored = readLock.acquire()) { return current.getGeneration(); } } @@ -409,10 +411,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public Location add(final Operation operation) throws IOException { final ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(bigArrays); try { - final BufferedChecksumStreamOutput checksumStreamOutput = new BufferedChecksumStreamOutput(out); final long start = out.position(); out.skip(Integer.BYTES); - writeOperationNoSize(checksumStreamOutput, operation); + writeOperationNoSize(new BufferedChecksumStreamOutput(out), operation); final long end = out.position(); final int operationSize = (int) (end - Integer.BYTES - start); out.seek(start); @@ -442,6 +443,30 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } + /** + * Tests whether or not the translog should be flushed. This test is based on the current size + * of the translog compared to the configured flush threshold size. + * + * @return {@code true} if the translog should be flushed + */ + public boolean shouldFlush() { + final long size = this.sizeInBytes(); + return size > this.indexSettings.getFlushThresholdSize().getBytes(); + } + + /** + * Tests whether or not the translog generation should be rolled to a new generation. This test + * is based on the size of the current generation compared to the configured generation + * threshold size. + * + * @return {@code true} if the current generation should be rolled to a new generation + */ + public boolean shouldRollGeneration() { + final long size = this.current.sizeInBytes(); + final long threshold = this.indexSettings.getGenerationThresholdSize().getBytes(); + return size > threshold; + } + /** * The a {@linkplain Location} that will sort after the {@linkplain Location} returned by the last write but before any locations which * can be returned by the next write. @@ -1322,44 +1347,63 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC out.writeInt((int) checksum); } + /** + * Roll the current translog generation into a new generation. This does not commit the + * translog.
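The roll test above is a plain size comparison against the IndexSettings threshold introduced at the top of this diff. A sketch of the same arithmetic (the 64mb figure is illustrative, not a default taken from this diff):

    import org.elasticsearch.common.unit.ByteSizeUnit;
    import org.elasticsearch.common.unit.ByteSizeValue;

    class RollCheckExample {
        // Roll once the live generation outgrows the configured threshold.
        static boolean shouldRoll(long currentGenerationSizeInBytes, ByteSizeValue threshold) {
            return currentGenerationSizeInBytes > threshold.getBytes();
        }

        public static void main(String[] args) {
            ByteSizeValue threshold = new ByteSizeValue(64, ByteSizeUnit.MB);
            System.out.println(shouldRoll(80 * 1024 * 1024, threshold)); // true
        }
    }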
+ * + * @throws IOException if an I/O exception occurred during any file operations + */ + public void rollGeneration() throws IOException { + try (Releasable ignored = writeLock.acquire()) { + try { + final TranslogReader reader = current.closeIntoReader(); + readers.add(reader); + final Path checkpoint = location.resolve(CHECKPOINT_FILE_NAME); + assert Checkpoint.read(checkpoint).generation == current.getGeneration(); + final Path generationCheckpoint = + location.resolve(getCommitCheckpointFileName(current.getGeneration())); + Files.copy(checkpoint, generationCheckpoint); + IOUtils.fsync(generationCheckpoint, false); + IOUtils.fsync(generationCheckpoint.getParent(), true); + // create a new translog file; this will sync it and update the checkpoint data; + current = createWriter(current.getGeneration() + 1); + logger.trace("current translog set to [{}]", current.getGeneration()); + } catch (final Exception e) { + IOUtils.closeWhileHandlingException(this); // tragic event + throw e; + } + } + } + @Override public long prepareCommit() throws IOException { - try (ReleasableLock lock = writeLock.acquire()) { + try (ReleasableLock ignored = writeLock.acquire()) { ensureOpen(); if (currentCommittingGeneration != NOT_SET_GENERATION) { - throw new IllegalStateException("already committing a translog with generation: " + currentCommittingGeneration); + final String message = String.format( + Locale.ROOT, + "already committing a translog with generation [%d]", + currentCommittingGeneration); + throw new IllegalStateException(message); } currentCommittingGeneration = current.getGeneration(); - TranslogReader currentCommittingTranslog = current.closeIntoReader(); - readers.add(currentCommittingTranslog); - Path checkpoint = location.resolve(CHECKPOINT_FILE_NAME); - assert Checkpoint.read(checkpoint).generation == currentCommittingTranslog.getGeneration(); - Path commitCheckpoint = location.resolve(getCommitCheckpointFileName(currentCommittingTranslog.getGeneration())); - Files.copy(checkpoint, commitCheckpoint); - IOUtils.fsync(commitCheckpoint, false); - IOUtils.fsync(commitCheckpoint.getParent(), true); - // create a new translog file - this will sync it and update the checkpoint data; - current = createWriter(current.getGeneration() + 1); - logger.trace("current translog set to [{}]", current.getGeneration()); - - } catch (Exception e) { - IOUtils.closeWhileHandlingException(this); // tragic event - throw e; + rollGeneration(); } - return 0L; + return 0; } @Override public long commit() throws IOException { - try (ReleasableLock lock = writeLock.acquire()) { + try (ReleasableLock ignored = writeLock.acquire()) { ensureOpen(); if (currentCommittingGeneration == NOT_SET_GENERATION) { prepareCommit(); } assert currentCommittingGeneration != NOT_SET_GENERATION; - assert readers.stream().filter(r -> r.getGeneration() == currentCommittingGeneration).findFirst().isPresent() - : "reader list doesn't contain committing generation [" + currentCommittingGeneration + "]"; - lastCommittedTranslogFileGeneration = current.getGeneration(); // this is important - otherwise old files will not be cleaned up + assert readers.stream().anyMatch(r -> r.getGeneration() == currentCommittingGeneration) + : "readers missing committing generation [" + currentCommittingGeneration + "]"; + // set the last committed generation otherwise old files will not be cleaned up + lastCommittedTranslogFileGeneration = currentCommittingGeneration + 1; currentCommittingGeneration = NOT_SET_GENERATION; trimUnreferencedReaders(); } diff 
--git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index a4e4c83bc00..7bf80cc1986 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -121,7 +121,6 @@ import java.io.Closeable; import java.io.IOException; import java.nio.file.Files; import java.util.ArrayList; -import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -1145,7 +1144,7 @@ public class IndicesService extends AbstractLifecycleComponent final QuerySearchResult result = context.queryResult(); StreamInput in = new NamedWriteableAwareStreamInput(bytesReference.streamInput(), namedWriteableRegistry); result.readFromWithId(context.id(), in); - result.shardTarget(context.shardTarget()); + result.setSearchShardTarget(context.shardTarget()); } else if (context.queryResult().searchTimedOut()) { // we have to invalidate the cache entry if we cached a query result form a request that timed out. // we can't really throw exceptions in the loading part to signal a timed out search to the outside world since if there are diff --git a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java index 90b1d32f4ae..73a3811f729 100644 --- a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java @@ -21,9 +21,13 @@ package org.elasticsearch.plugins; public class DummyPluginInfo extends PluginInfo { private DummyPluginInfo(String name, String description, String version, String classname) { - super(name, description, version, classname); + super(name, description, version, classname, false); } - public static final DummyPluginInfo INSTANCE = new DummyPluginInfo( - "dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName"); + public static final DummyPluginInfo INSTANCE = + new DummyPluginInfo( + "dummy_plugin_name", + "dummy plugin description", + "dummy_plugin_version", + "DummyPluginName"); } diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index 7ab9996a709..7360eef9238 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -458,7 +458,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand { // if it exists, confirm or warn the user Path policy = pluginRoot.resolve(PluginInfo.ES_PLUGIN_POLICY); if (Files.exists(policy)) { - PluginSecurity.readPolicy(policy, terminal, env, isBatch); + PluginSecurity.readPolicy(info, policy, terminal, env::tmpFile, isBatch); } return info; diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index 1c6c4f17ff2..c2b5ce34b54 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -61,7 +61,7 @@ class ListPluginsCommand extends EnvironmentAwareCommand { PluginInfo info = PluginInfo.readFromProperties(env.pluginsFile().resolve(plugin.toAbsolutePath())); terminal.println(Terminal.Verbosity.VERBOSE, info.toString()); } catch (IllegalArgumentException e) { - if 
(e.getMessage().contains("incompatible with Elasticsearch")) { + if (e.getMessage().contains("incompatible with version")) { terminal.println("WARNING: " + e.getMessage()); } else { throw e; diff --git a/core/src/main/java/org/elasticsearch/plugins/Platforms.java b/core/src/main/java/org/elasticsearch/plugins/Platforms.java new file mode 100644 index 00000000000..62bb32a4e9a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/plugins/Platforms.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugins; + +import org.apache.lucene.util.Constants; + +import java.nio.file.Path; +import java.util.Locale; + +/** + * Encapsulates platform-dependent methods for handling native components of plugins. + */ +public class Platforms { + + private static final String PROGRAM_NAME = Constants.WINDOWS ? "controller.exe" : "controller"; + private static final String PLATFORM_NAME = + Platforms.platformName(Constants.OS_NAME, Constants.OS_ARCH); + + private Platforms() {} + + /** + * The path to the native controller for a plugin with native components. + */ + public static Path nativeControllerPath(Path plugin) { + return plugin + .resolve("platform") + .resolve(PLATFORM_NAME) + .resolve("bin") + .resolve(PROGRAM_NAME); + } + + /** + * Return the platform name based on the OS name and architecture, for example: + * - darwin-x86_64 + * - linux-x86_64 + * - windows-x86_64 + * For *nix platforms this is more-or-less `uname -s`-`uname -m` converted to lower case. + * However, for consistency between different operating systems on the same architecture + * "amd64" is replaced with "x86_64" and "i386" with "x86". + * For Windows it's "windows-" followed by either "x86" or "x86_64".
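Concrete cases implied by the Javadoc above and the normalization code that follows (worth keeping in mind when laying out a plugin's platform directory):

    import org.elasticsearch.plugins.Platforms;

    class PlatformNameExample {
        public static void main(String[] args) {
            // Expected normalizations for common OS/arch pairs.
            assert Platforms.platformName("Mac OS X", "x86_64").equals("darwin-x86_64");
            assert Platforms.platformName("Linux", "amd64").equals("linux-x86_64");
            assert Platforms.platformName("Windows Server 2016", "amd64").equals("windows-x86_64");
        }
    }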
+ */ + public static String platformName(final String osName, final String osArch) { + final String lowerCaseOs = osName.toLowerCase(Locale.ROOT); + final String normalizedOs; + if (lowerCaseOs.startsWith("windows")) { + normalizedOs = "windows"; + } else if (lowerCaseOs.equals("mac os x")) { + normalizedOs = "darwin"; + } else { + normalizedOs = lowerCaseOs; + } + + final String lowerCaseArch = osArch.toLowerCase(Locale.ROOT); + final String normalizedArch; + if (lowerCaseArch.equals("amd64")) { + normalizedArch = "x86_64"; + } else if (lowerCaseArch.equals("i386")) { + normalizedArch = "x86"; + } else { + normalizedArch = lowerCaseArch; + } + + return normalizedOs + "-" + normalizedArch; + } + +} diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java index 3e241eadd37..943f9018e6f 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.plugins; import org.elasticsearch.Version; @@ -30,133 +31,215 @@ import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Locale; import java.util.Properties; +/** + * An in-memory representation of the plugin descriptor. + */ public class PluginInfo implements Writeable, ToXContent { public static final String ES_PLUGIN_PROPERTIES = "plugin-descriptor.properties"; public static final String ES_PLUGIN_POLICY = "plugin-security.policy"; - static final class Fields { - static final String NAME = "name"; - static final String DESCRIPTION = "description"; - static final String URL = "url"; - static final String VERSION = "version"; - static final String CLASSNAME = "classname"; - } - private final String name; private final String description; private final String version; private final String classname; + private final boolean hasNativeController; /** - * Information about plugins + * Construct plugin info. * - * @param name Its name - * @param description Its description - * @param version Version number + * @param name the name of the plugin + * @param description a description of the plugin + * @param version the version of Elasticsearch the plugin is built for + * @param classname the entry point to the plugin + * @param hasNativeController whether or not the plugin has a native controller */ - public PluginInfo(String name, String description, String version, String classname) { + public PluginInfo( + final String name, + final String description, + final String version, + final String classname, + final boolean hasNativeController) { this.name = name; this.description = description; this.version = version; this.classname = classname; + this.hasNativeController = hasNativeController; } - public PluginInfo(StreamInput in) throws IOException { + /** + * Construct plugin info from a stream. 
+ * + * @param in the stream + * @throws IOException if an I/O exception occurred reading the plugin info from the stream + */ + public PluginInfo(final StreamInput in) throws IOException { this.name = in.readString(); this.description = in.readString(); this.version = in.readString(); this.classname = in.readString(); + if (in.getVersion().onOrAfter(Version.V_5_4_0_UNRELEASED)) { + hasNativeController = in.readBoolean(); + } else { + hasNativeController = false; + } } @Override - public void writeTo(StreamOutput out) throws IOException { + public void writeTo(final StreamOutput out) throws IOException { out.writeString(name); out.writeString(description); out.writeString(version); out.writeString(classname); + if (out.getVersion().onOrAfter(Version.V_5_4_0_UNRELEASED)) { + out.writeBoolean(hasNativeController); + } } /** reads (and validates) plugin metadata descriptor file */ - public static PluginInfo readFromProperties(Path dir) throws IOException { - Path descriptor = dir.resolve(ES_PLUGIN_PROPERTIES); - Properties props = new Properties(); + + /** + * Reads and validates the plugin descriptor file. + * + * @param path the path to the root directory for the plugin + * @return the plugin info + * @throws IOException if an I/O exception occurred reading the plugin descriptor + */ + public static PluginInfo readFromProperties(final Path path) throws IOException { + final Path descriptor = path.resolve(ES_PLUGIN_PROPERTIES); + final Properties props = new Properties(); try (InputStream stream = Files.newInputStream(descriptor)) { props.load(stream); } - String name = props.getProperty("name"); + final String name = props.getProperty("name"); if (name == null || name.isEmpty()) { - throw new IllegalArgumentException("Property [name] is missing in [" + descriptor + "]"); + throw new IllegalArgumentException( + "property [name] is missing in [" + descriptor + "]"); } - String description = props.getProperty("description"); + final String description = props.getProperty("description"); if (description == null) { - throw new IllegalArgumentException("Property [description] is missing for plugin [" + name + "]"); + throw new IllegalArgumentException( + "property [description] is missing for plugin [" + name + "]"); } - String version = props.getProperty("version"); + final String version = props.getProperty("version"); if (version == null) { - throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]"); + throw new IllegalArgumentException( + "property [version] is missing for plugin [" + name + "]"); } - String esVersionString = props.getProperty("elasticsearch.version"); + final String esVersionString = props.getProperty("elasticsearch.version"); if (esVersionString == null) { - throw new IllegalArgumentException("Property [elasticsearch.version] is missing for plugin [" + name + "]"); + throw new IllegalArgumentException( + "property [elasticsearch.version] is missing for plugin [" + name + "]"); } - Version esVersion = Version.fromString(esVersionString); + final Version esVersion = Version.fromString(esVersionString); if (esVersion.equals(Version.CURRENT) == false) { - throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() + - "]. 
Was designed for version [" + esVersionString + "]"); + final String message = String.format( + Locale.ROOT, + "plugin [%s] is incompatible with version [%s]; was designed for version [%s]", + name, + Version.CURRENT.toString(), + esVersionString); + throw new IllegalArgumentException(message); } - String javaVersionString = props.getProperty("java.version"); + final String javaVersionString = props.getProperty("java.version"); if (javaVersionString == null) { - throw new IllegalArgumentException("Property [java.version] is missing for plugin [" + name + "]"); + throw new IllegalArgumentException( + "property [java.version] is missing for plugin [" + name + "]"); } JarHell.checkVersionFormat(javaVersionString); JarHell.checkJavaVersion(name, javaVersionString); - String classname = props.getProperty("classname"); + final String classname = props.getProperty("classname"); if (classname == null) { - throw new IllegalArgumentException("Property [classname] is missing for plugin [" + name + "]"); + throw new IllegalArgumentException( + "property [classname] is missing for plugin [" + name + "]"); } - return new PluginInfo(name, description, version, classname); + final String hasNativeControllerValue = props.getProperty("has.native.controller"); + final boolean hasNativeController; + if (hasNativeControllerValue == null) { + hasNativeController = false; + } else { + switch (hasNativeControllerValue) { + case "true": + hasNativeController = true; + break; + case "false": + hasNativeController = false; + break; + default: + final String message = String.format( + Locale.ROOT, + "property [%s] must be [%s], [%s], or unspecified but was [%s]", + "has.native.controller", + "true", + "false", + hasNativeControllerValue); + throw new IllegalArgumentException(message); + } + } + + return new PluginInfo(name, description, version, classname, hasNativeController); } /** - * @return Plugin's name + * The name of the plugin. + * + * @return the plugin name */ public String getName() { return name; } /** - * @return Plugin's description if any + * The description of the plugin. + * + * @return the plugin description */ public String getDescription() { return description; } /** - * @return plugin's classname + * The entry point to the plugin. + * + * @return the entry point to the plugin */ public String getClassname() { return classname; } /** - * @return Version number for the plugin + * The version of Elasticsearch the plugin was built for. + * + * @return the version */ public String getVersion() { return version; } + /** + * Whether or not the plugin has a native controller.
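Putting the descriptor parsing together, a plugin-descriptor.properties opting in to the new flag might look like the following; every value is illustrative, and only has.native.controller is new in this change:

    # sketch of a plugin-descriptor.properties with the new optional flag
    name=example-plugin
    description=An example plugin that forks a native controller
    version=1.0.0
    elasticsearch.version=5.4.0
    java.version=1.8
    classname=org.example.ExamplePlugin
    has.native.controller=true

Omitting the flag defaults to false; any value other than true or false fails with the IllegalArgumentException above.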
+ * + * @return {@code true} if the plugin has a native controller + */ + public boolean hasNativeController() { + return hasNativeController; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(Fields.NAME, name); - builder.field(Fields.VERSION, version); - builder.field(Fields.DESCRIPTION, description); - builder.field(Fields.CLASSNAME, classname); + { + builder.field("name", name); + builder.field("version", version); + builder.field("description", description); + builder.field("classname", classname); + builder.field("has_native_controller", hasNativeController); + } builder.endObject(); return builder; @@ -187,8 +270,9 @@ public class PluginInfo implements Writeable, ToXContent { .append("Name: ").append(name).append("\n") .append("Description: ").append(description).append("\n") .append("Version: ").append(version).append("\n") + .append("Native Controller: ").append(hasNativeController).append("\n") .append(" * Classname: ").append(classname); - return information.toString(); } + } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java b/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java index f9c3d1826c9..55a3c6069e7 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginSecurity.java @@ -37,60 +37,74 @@ import java.security.UnresolvedPermission; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.function.Supplier; class PluginSecurity { /** * Reads plugin policy, prints/confirms exceptions */ - static void readPolicy(Path file, Terminal terminal, Environment environment, boolean batch) throws IOException { - PermissionCollection permissions = parsePermissions(terminal, file, environment.tmpFile()); + static void readPolicy(PluginInfo info, Path file, Terminal terminal, Supplier tmpFile, boolean batch) throws IOException { + PermissionCollection permissions = parsePermissions(terminal, file, tmpFile.get()); List requested = Collections.list(permissions.elements()); if (requested.isEmpty()) { terminal.println(Verbosity.VERBOSE, "plugin has a policy file with no additional permissions"); - return; - } + } else { - // sort permissions in a reasonable order - Collections.sort(requested, new Comparator() { - @Override - public int compare(Permission o1, Permission o2) { - int cmp = o1.getClass().getName().compareTo(o2.getClass().getName()); - if (cmp == 0) { - String name1 = o1.getName(); - String name2 = o2.getName(); - if (name1 == null) { - name1 = ""; - } - if (name2 == null) { - name2 = ""; - } - cmp = name1.compareTo(name2); + // sort permissions in a reasonable order + Collections.sort(requested, new Comparator() { + @Override + public int compare(Permission o1, Permission o2) { + int cmp = o1.getClass().getName().compareTo(o2.getClass().getName()); if (cmp == 0) { - String actions1 = o1.getActions(); - String actions2 = o2.getActions(); - if (actions1 == null) { - actions1 = ""; + String name1 = o1.getName(); + String name2 = o2.getName(); + if (name1 == null) { + name1 = ""; } - if (actions2 == null) { - actions2 = ""; + if (name2 == null) { + name2 = ""; + } + cmp = name1.compareTo(name2); + if (cmp == 0) { + String actions1 = o1.getActions(); + String actions2 = o2.getActions(); + if (actions1 == null) { + actions1 = ""; + } + if (actions2 == null) { + actions2 = ""; + } + cmp = actions1.compareTo(actions2); } - cmp 
= actions1.compareTo(actions2); } + return cmp; } - return cmp; - } - }); + }); - terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); - terminal.println(Verbosity.NORMAL, "@ WARNING: plugin requires additional permissions @"); - terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); - // print all permissions: - for (Permission permission : requested) { - terminal.println(Verbosity.NORMAL, "* " + formatPermission(permission)); + terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); + terminal.println(Verbosity.NORMAL, "@ WARNING: plugin requires additional permissions @"); + terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); + // print all permissions: + for (Permission permission : requested) { + terminal.println(Verbosity.NORMAL, "* " + formatPermission(permission)); + } + terminal.println(Verbosity.NORMAL, "See http://docs.oracle.com/javase/8/docs/technotes/guides/security/permissions.html"); + terminal.println(Verbosity.NORMAL, "for descriptions of what these permissions allow and the associated risks."); + prompt(terminal, batch); } - terminal.println(Verbosity.NORMAL, "See http://docs.oracle.com/javase/8/docs/technotes/guides/security/permissions.html"); - terminal.println(Verbosity.NORMAL, "for descriptions of what these permissions allow and the associated risks."); + + if (info.hasNativeController()) { + terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); + terminal.println(Verbosity.NORMAL, "@ WARNING: plugin forks a native controller @"); + terminal.println(Verbosity.NORMAL, "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); + terminal.println(Verbosity.NORMAL, "This plugin launches a native controller that is not subject to the Java"); + terminal.println(Verbosity.NORMAL, "security manager nor to system call filters."); + prompt(terminal, batch); + } + } + + private static void prompt(final Terminal terminal, final boolean batch) { if (!batch) { terminal.println(Verbosity.NORMAL, ""); String text = terminal.readText("Continue with installation? [y/N]"); diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 9295c6c38d8..fc63678b94f 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -101,7 +101,7 @@ public class PluginsService extends AbstractComponent { // first we load plugins that are on the classpath. 
this is for tests and transport clients for (Class pluginClass : classpathPlugins) { Plugin plugin = loadPlugin(pluginClass, settings); - PluginInfo pluginInfo = new PluginInfo(pluginClass.getName(), "classpath plugin", "NA", pluginClass.getName()); + PluginInfo pluginInfo = new PluginInfo(pluginClass.getName(), "classpath plugin", "NA", pluginClass.getName(), false); if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java b/core/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java new file mode 100644 index 00000000000..e983bdc182a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action; + +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestStatus.NOT_FOUND; +import static org.elasticsearch.rest.RestStatus.OK; + +public class RestFieldCapabilitiesAction extends BaseRestHandler { + public RestFieldCapabilitiesAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(GET, "/_field_caps", this); + controller.registerHandler(POST, "/_field_caps", this); + controller.registerHandler(GET, "/{index}/_field_caps", this); + controller.registerHandler(POST, "/{index}/_field_caps", this); + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, + final NodeClient client) throws IOException { + if (request.hasContentOrSourceParam() && request.hasParam("fields")) { + throw new IllegalArgumentException("can't specify a request body and [fields]" + + " request parameter, either specify a request body or the" + + " [fields] request parameter"); + } + final String[] indices = 
Strings.splitStringByCommaToArray(request.param("index")); + final FieldCapabilitiesRequest fieldRequest; + if (request.hasContentOrSourceParam()) { + try (XContentParser parser = request.contentOrSourceParamParser()) { + fieldRequest = FieldCapabilitiesRequest.parseFields(parser); + } + } else { + fieldRequest = new FieldCapabilitiesRequest(); + fieldRequest.fields(Strings.splitStringByCommaToArray(request.param("fields"))); + } + fieldRequest.indices(indices); + fieldRequest.indicesOptions( + IndicesOptions.fromRequest(request, fieldRequest.indicesOptions()) + ); + return channel -> client.fieldCaps(fieldRequest, + new RestBuilderListener<FieldCapabilitiesResponse>(channel) { + @Override + public RestResponse buildResponse(FieldCapabilitiesResponse response, + XContentBuilder builder) throws Exception { + RestStatus status = OK; + builder.startObject(); + response.toXContent(builder, request); + builder.endObject(); + return new BytesRestResponse(status, builder); + } + }); + } +} diff --git a/core/src/main/java/org/elasticsearch/search/SearchHit.java b/core/src/main/java/org/elasticsearch/search/SearchHit.java index 05558fd6f09..71b0b9127b2 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/SearchHit.java @@ -493,7 +493,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<SearchHitField> { public static void declareInnerHitsParseFields(ObjectParser<Map<String, Object>, Void> parser) { declareMetaDataFields(parser); - parser.declareString((map, value) -> map.put(Fields._TYPE, value), new ParseField(Fields._TYPE)); + parser.declareString((map, value) -> map.put(Fields._TYPE, new Text(value)), new ParseField(Fields._TYPE)); parser.declareString((map, value) -> map.put(Fields._INDEX, value), new ParseField(Fields._INDEX)); parser.declareString((map, value) -> map.put(Fields._ID, value), new ParseField(Fields._ID)); parser.declareString((map, value) -> map.put(Fields._NODE, value), new ParseField(Fields._NODE)); @@ -524,11 +524,11 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<SearchHitField> { private static SearchHit createFromMap(Map<String, Object> values) { String id = get(Fields._ID, values, null); - String type = get(Fields._TYPE, values, null); + Text type = get(Fields._TYPE, values, null); NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null); Map<String, SearchHitField> fields = get(Fields.FIELDS, values, null); - SearchHit searchHit = new SearchHit(-1, id, new Text(type), nestedIdentity, fields); + SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields); searchHit.index = get(Fields._INDEX, values, null); searchHit.score(get(Fields._SCORE, values, DEFAULT_SCORE)); searchHit.version(get(Fields._VERSION, values, -1L)); diff --git a/core/src/main/java/org/elasticsearch/search/SearchPhaseResult.java b/core/src/main/java/org/elasticsearch/search/SearchPhaseResult.java index 003f37616f5..ede9f525a5a 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchPhaseResult.java +++ b/core/src/main/java/org/elasticsearch/search/SearchPhaseResult.java @@ -20,12 +20,63 @@ package org.elasticsearch.search; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.search.fetch.FetchSearchResult; +import org.elasticsearch.search.query.QuerySearchResult; +import org.elasticsearch.transport.TransportResponse; -public interface SearchPhaseResult extends Streamable { +/** + * This class is a base class for all search related results. 
It contains the shard target it + * was executed against, a shard index used to reference the result on the coordinating node + * and a request ID that is used to reference the request context on the executing node. The + * request ID is particularly important since it is used to reference and maintain a context + * across search phases to ensure the same point in time snapshot is used for querying and + * fetching etc. + */ +public abstract class SearchPhaseResult extends TransportResponse implements Streamable { - long id(); + private SearchShardTarget searchShardTarget; + private int shardIndex = -1; + protected long requestId; - SearchShardTarget shardTarget(); + /** + * Returns the results request ID that is used to reference the search context on the executing + * node + */ + public long getRequestId() { + return requestId; + } - void shardTarget(SearchShardTarget shardTarget); + /** + * Returns the shard index in the context of the currently executing search request that is + * used for accounting on the coordinating node + */ + public int getShardIndex() { + assert shardIndex != -1 : "shardIndex is not set"; + return shardIndex; + } + + public SearchShardTarget getSearchShardTarget() { + return searchShardTarget; + } + + public void setSearchShardTarget(SearchShardTarget shardTarget) { + this.searchShardTarget = shardTarget; + } + + public void setShardIndex(int shardIndex) { + assert shardIndex >= 0 : "shardIndex must be >= 0 but was: " + shardIndex; + this.shardIndex = shardIndex; + } + + /** + * Returns the query result iff it's included in this response otherwise null + */ + public QuerySearchResult queryResult() { + return null; + } + + /** + * Returns the fetch result iff it's included in this response otherwise null + */ + public FetchSearchResult fetchResult() { return null; } } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 3d093e5ae72..a0352281952 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -75,7 +75,6 @@ import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QueryPhase; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.query.ScrollQuerySearchResult; import org.elasticsearch.search.rescore.RescoreBuilder; import org.elasticsearch.search.searchafter.SearchAfterBuilder; @@ -248,7 +247,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } } - public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request, SearchTask task) throws IOException { + public SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchTask task) throws IOException { final SearchContext context = createAndPutContext(request); final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); context.incRef(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java index 42b4561e07b..6425cc3b68a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java @@ -34,6 +34,9 @@ import org.elasticsearch.common.util.IntArray; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; /** * Hyperloglog++ counter, implemented based on pseudo code from @@ -420,6 +423,32 @@ public final class HyperLogLogPlusPlus implements Releasable { Releasables.close(runLens, hashSet.sizes); } + private Set<Object> getComparableData(long bucket) { + Set<Object> values = new HashSet<>(); + if (algorithm.get(bucket) == LINEAR_COUNTING) { + try (IntArray hashSetValues = hashSet.values(bucket)) { + for (long i = 0; i < hashSetValues.size(); i++) { + values.add(hashSetValues.get(i)); + } + } + } else { + for (long i = 0; i < runLens.size(); i++) { + values.add(runLens.get((bucket << p) + i)); + } + } + return values; + } + + public int hashCode(long bucket) { + return Objects.hash(p, algorithm.get(bucket), getComparableData(bucket)); + } + + public boolean equals(long bucket, HyperLogLogPlusPlus other) { + return Objects.equals(p, other.p) && + Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket)) && + Objects.equals(getComparableData(bucket), other.getComparableData(bucket)); + } + /** * We are actually using HyperLogLog's runLens array but interpreting it as a hash set * for linear counting. diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinality.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinality.java index 68e8935616f..028e97a69ff 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinality.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinality.java @@ -113,4 +113,18 @@ public final class InternalCardinality extends InternalNumericMetricsAggregation return builder; } + @Override + protected int doHashCode() { + return counts.hashCode(0); + } + + @Override + protected boolean doEquals(Object obj) { + InternalCardinality other = (InternalCardinality) obj; + return counts.equals(0, other.counts); + } + + HyperLogLogPlusPlus getState() { + return counts; + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java index 0f8eec4e66a..a3c7012f7cd 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java @@ -25,15 +25,15 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Objects; public class InternalSimpleValue extends InternalNumericMetricsAggregation.SingleValue implements SimpleValue { public static final String NAME = "simple_value"; - private final double value; + protected final double value; public InternalSimpleValue(String name, double value, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) { @@ -72,7 
+72,7 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl } @Override - public InternalMax doReduce(List aggregations, ReduceContext reduceContext) { + public InternalSimpleValue doReduce(List aggregations, ReduceContext reduceContext) { throw new UnsupportedOperationException("Not supported"); } @@ -85,4 +85,15 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl } return builder; } + + @Override + protected int doHashCode() { + return Objects.hash(value); + } + + @Override + protected boolean doEquals(Object obj) { + InternalSimpleValue other = (InternalSimpleValue) obj; + return Objects.equals(value, other.value); + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivative.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivative.java index e18c0d81eeb..db56f0f7c6f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivative.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivative.java @@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Objects; public class InternalDerivative extends InternalSimpleValue implements Derivative { private final double normalizationFactor; @@ -89,4 +90,16 @@ public class InternalDerivative extends InternalSimpleValue implements Derivativ } return builder; } + + @Override + protected int doHashCode() { + return Objects.hash(normalizationFactor, value); + } + + @Override + protected boolean doEquals(Object obj) { + InternalDerivative other = (InternalDerivative) obj; + return Objects.equals(value, other.value) + && Objects.equals(normalizationFactor, other.normalizationFactor); + } } diff --git a/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index bf3d9527246..0cd624b00a3 100644 --- a/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -30,44 +30,24 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.transport.TransportResponse; import java.io.IOException; -public class DfsSearchResult extends TransportResponse implements SearchPhaseResult { +public class DfsSearchResult extends SearchPhaseResult { private static final Term[] EMPTY_TERMS = new Term[0]; private static final TermStatistics[] EMPTY_TERM_STATS = new TermStatistics[0]; - - private SearchShardTarget shardTarget; - private long id; private Term[] terms; private TermStatistics[] termStatistics; private ObjectObjectHashMap fieldStatistics = HppcMaps.newNoNullKeysMap(); private int maxDoc; public DfsSearchResult() { - } public DfsSearchResult(long id, SearchShardTarget shardTarget) { - this.id = id; - this.shardTarget = shardTarget; - } - - @Override - public long id() { - return this.id; - } - - @Override - public SearchShardTarget shardTarget() { - return shardTarget; - } - - @Override - public void shardTarget(SearchShardTarget shardTarget) { - this.shardTarget = shardTarget; + this.setSearchShardTarget(shardTarget); + this.requestId = id; } 
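A minimal sketch of the contract this refactoring establishes (the subclass below is hypothetical, assuming only the SearchPhaseResult and SearchShardTarget classes shown above): subclasses populate the protected requestId and their shard target, while the shardIndex bookkeeping for the coordinating node is inherited from the base class.

```java
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;

// Hypothetical phase result illustrating the new base-class contract.
public class ExampleSearchResult extends SearchPhaseResult {
    public ExampleSearchResult(long id, SearchShardTarget shardTarget) {
        this.requestId = id;               // references the search context on the executing node
        setSearchShardTarget(shardTarget); // the shard this result was produced on
    }
}
```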
public DfsSearchResult maxDoc(int maxDoc) { @@ -105,7 +85,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - id = in.readLong(); + requestId = in.readLong(); int termsSize = in.readVInt(); if (termsSize == 0) { terms = EMPTY_TERMS; @@ -125,7 +105,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeLong(id); + out.writeLong(requestId); out.writeVInt(terms.length); for (Term term : terms) { out.writeString(term.field()); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 1e2def8cc61..a5f27733ad2 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -22,28 +22,25 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import java.io.IOException; -public class FetchSearchResult extends QuerySearchResultProvider { +public final class FetchSearchResult extends SearchPhaseResult { - private long id; - private SearchShardTarget shardTarget; private SearchHits hits; // client side counter private transient int counter; public FetchSearchResult() { - } public FetchSearchResult(long id, SearchShardTarget shardTarget) { - this.id = id; - this.shardTarget = shardTarget; + this.requestId = id; + setSearchShardTarget(shardTarget); } @Override @@ -56,21 +53,6 @@ public class FetchSearchResult extends QuerySearchResultProvider { return this; } - @Override - public long id() { - return this.id; - } - - @Override - public SearchShardTarget shardTarget() { - return this.shardTarget; - } - - @Override - public void shardTarget(SearchShardTarget shardTarget) { - this.shardTarget = shardTarget; - } - public void hits(SearchHits hits) { assert assertNoSearchTarget(hits); this.hits = hits; @@ -105,14 +87,14 @@ public class FetchSearchResult extends QuerySearchResultProvider { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - id = in.readLong(); + requestId = in.readLong(); hits = SearchHits.readSearchHits(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeLong(id); + out.writeLong(requestId); hits.writeTo(out); } } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java index 35c4dbd6597..8d1e6276e65 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/QueryFetchSearchResult.java @@ -21,22 +21,21 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchPhaseResult; import 
org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import java.io.IOException; import static org.elasticsearch.search.fetch.FetchSearchResult.readFetchSearchResult; import static org.elasticsearch.search.query.QuerySearchResult.readQuerySearchResult; -public class QueryFetchSearchResult extends QuerySearchResultProvider { +public final class QueryFetchSearchResult extends SearchPhaseResult { private QuerySearchResult queryResult; private FetchSearchResult fetchResult; public QueryFetchSearchResult() { - } public QueryFetchSearchResult(QuerySearchResult queryResult, FetchSearchResult fetchResult) { @@ -45,19 +44,27 @@ public class QueryFetchSearchResult extends QuerySearchResultProvider { } @Override - public long id() { - return queryResult.id(); + public long getRequestId() { + return queryResult.getRequestId(); } @Override - public SearchShardTarget shardTarget() { - return queryResult.shardTarget(); + public SearchShardTarget getSearchShardTarget() { + return queryResult.getSearchShardTarget(); } @Override - public void shardTarget(SearchShardTarget shardTarget) { - queryResult.shardTarget(shardTarget); - fetchResult.shardTarget(shardTarget); + public void setSearchShardTarget(SearchShardTarget shardTarget) { + super.setSearchShardTarget(shardTarget); + queryResult.setSearchShardTarget(shardTarget); + fetchResult.setSearchShardTarget(shardTarget); + } + + @Override + public void setShardIndex(int requestIndex) { + super.setShardIndex(requestIndex); + queryResult.setShardIndex(requestIndex); + fetchResult.setShardIndex(requestIndex); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java b/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java index e8a9af00127..55aa4a96d01 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java @@ -21,46 +21,64 @@ package org.elasticsearch.search.fetch; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.search.query.QuerySearchResult; import java.io.IOException; import static org.elasticsearch.search.fetch.QueryFetchSearchResult.readQueryFetchSearchResult; -public class ScrollQueryFetchSearchResult extends TransportResponse { +public final class ScrollQueryFetchSearchResult extends SearchPhaseResult { private QueryFetchSearchResult result; - private SearchShardTarget shardTarget; public ScrollQueryFetchSearchResult() { } public ScrollQueryFetchSearchResult(QueryFetchSearchResult result, SearchShardTarget shardTarget) { this.result = result; - this.shardTarget = shardTarget; + setSearchShardTarget(shardTarget); } public QueryFetchSearchResult result() { return result; } - public SearchShardTarget shardTarget() { - return shardTarget; + @Override + public void setSearchShardTarget(SearchShardTarget shardTarget) { + super.setSearchShardTarget(shardTarget); + result.setSearchShardTarget(shardTarget); + } + + @Override + public void setShardIndex(int shardIndex) { + super.setShardIndex(shardIndex); + result.setShardIndex(shardIndex); + } + + @Override + public QuerySearchResult queryResult() { + 
return result.queryResult(); + } + + @Override + public FetchSearchResult fetchResult() { + return result.fetchResult(); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardTarget = new SearchShardTarget(in); + SearchShardTarget searchShardTarget = new SearchShardTarget(in); result = readQueryFetchSearchResult(in); - result.shardTarget(shardTarget); + setSearchShardTarget(searchShardTarget); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - shardTarget.writeTo(out); + getSearchShardTarget().writeTo(out); result.writeTo(out); } } diff --git a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index a8d8ae74062..15403f99677 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.TopDocs; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -40,10 +41,8 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.common.lucene.Lucene.readTopDocs; import static org.elasticsearch.common.lucene.Lucene.writeTopDocs; -public final class QuerySearchResult extends QuerySearchResultProvider { +public final class QuerySearchResult extends SearchPhaseResult { - private long id; - private SearchShardTarget shardTarget; private int from; private int size; private TopDocs topDocs; @@ -61,8 +60,8 @@ public final class QuerySearchResult extends QuerySearchResultProvider { } public QuerySearchResult(long id, SearchShardTarget shardTarget) { - this.id = id; - this.shardTarget = shardTarget; + this.requestId = id; + setSearchShardTarget(shardTarget); } @Override @@ -70,20 +69,6 @@ public final class QuerySearchResult extends QuerySearchResultProvider { return this; } - @Override - public long id() { - return this.id; - } - - @Override - public SearchShardTarget shardTarget() { - return shardTarget; - } - - @Override - public void shardTarget(SearchShardTarget shardTarget) { - this.shardTarget = shardTarget; - } public void searchTimedOut(boolean searchTimedOut) { this.searchTimedOut = searchTimedOut; @@ -230,7 +215,7 @@ public final class QuerySearchResult extends QuerySearchResultProvider { } public void readFromWithId(long id, StreamInput in) throws IOException { - this.id = id; + this.requestId = id; from = in.readVInt(); size = in.readVInt(); int numSortFieldsPlus1 = in.readVInt(); @@ -260,7 +245,7 @@ public final class QuerySearchResult extends QuerySearchResultProvider { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeLong(id); + out.writeLong(requestId); writeToNoId(out); } diff --git a/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java b/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java index 9137a72acb5..64014594899 100644 --- a/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java +++ b/core/src/main/java/org/elasticsearch/search/query/ScrollQuerySearchResult.java @@ -21,46 
+21,54 @@ package org.elasticsearch.search.query; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.transport.TransportResponse; import java.io.IOException; import static org.elasticsearch.search.query.QuerySearchResult.readQuerySearchResult; -public class ScrollQuerySearchResult extends TransportResponse { +public final class ScrollQuerySearchResult extends SearchPhaseResult { - private QuerySearchResult queryResult; - private SearchShardTarget shardTarget; + private QuerySearchResult result; public ScrollQuerySearchResult() { } - public ScrollQuerySearchResult(QuerySearchResult queryResult, SearchShardTarget shardTarget) { - this.queryResult = queryResult; - this.shardTarget = shardTarget; + public ScrollQuerySearchResult(QuerySearchResult result, SearchShardTarget shardTarget) { + this.result = result; + setSearchShardTarget(shardTarget); } + @Override + public void setSearchShardTarget(SearchShardTarget shardTarget) { + super.setSearchShardTarget(shardTarget); + result.setSearchShardTarget(shardTarget); + } + + @Override + public void setShardIndex(int shardIndex) { + super.setShardIndex(shardIndex); + result.setShardIndex(shardIndex); + } + + @Override public QuerySearchResult queryResult() { - return queryResult; - } - - public SearchShardTarget shardTarget() { - return shardTarget; + return result; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardTarget = new SearchShardTarget(in); - queryResult = readQuerySearchResult(in); - queryResult.shardTarget(shardTarget); + SearchShardTarget shardTarget = new SearchShardTarget(in); + result = readQuerySearchResult(in); + setSearchShardTarget(shardTarget); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - shardTarget.writeTo(out); - queryResult.writeTo(out); + getSearchShardTarget().writeTo(out); + result.writeTo(out); } } diff --git a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java index bc73ad7925b..8a19f254a8f 100644 --- a/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/searchafter/SearchAfterBuilder.java @@ -21,6 +21,8 @@ package org.elasticsearch.search.searchafter; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -128,12 +130,23 @@ public class SearchAfterBuilder implements ToXContent, Writeable { return new FieldDoc(Integer.MAX_VALUE, 0, fieldValues); } + private static SortField.Type extractSortType(SortField sortField) { + if (sortField instanceof SortedSetSortField) { + return SortField.Type.STRING; + } else if (sortField instanceof SortedNumericSortField) { + return ((SortedNumericSortField) sortField).getNumericType(); + } else { + return sortField.getType(); + } + } + private static Object convertValueFromSortField(Object value, SortField sortField, DocValueFormat format) { if (sortField.getComparatorSource() instanceof IndexFieldData.XFieldComparatorSource) { 
IndexFieldData.XFieldComparatorSource cmpSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource(); return convertValueFromSortType(sortField.getField(), cmpSource.reducedType(), value, format); } - return convertValueFromSortType(sortField.getField(), sortField.getType(), value, format); + SortField.Type sortType = extractSortType(sortField); + return convertValueFromSortType(sortField.getField(), sortType, value, format); } private static Object convertValueFromSortType(String fieldName, SortField.Type sortType, Object value, DocValueFormat format) { diff --git a/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index 429a3ebe892..ddc02d32e55 100644 --- a/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; +import org.apache.lucene.util.StringHelper; import java.io.IOException; @@ -46,6 +47,9 @@ import java.io.IOException; * NOTE: Documents with no value for that field are ignored. */ public final class TermsSliceQuery extends SliceQuery { + // Fixed seed for computing term hashCode + public static final int SEED = 7919; + public TermsSliceQuery(String field, int id, int max) { super(field, id, max); } @@ -71,7 +75,9 @@ public final class TermsSliceQuery extends SliceQuery { final TermsEnum te = terms.iterator(); PostingsEnum docsEnum = null; for (BytesRef term = te.next(); term != null; term = te.next()) { - int hashCode = term.hashCode(); + // use a fixed seed instead of term.hashCode() otherwise this query may return inconsistent results when + // running on another replica (StringHelper sets its default seed at startup with current time) + int hashCode = StringHelper.murmurhash3_x86_32(term, SEED); if (contains(hashCode)) { docsEnum = te.postings(docsEnum, PostingsEnum.NONE); builder.add(docsEnum); diff --git a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index e1585d708cd..db6177ab36f 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -279,9 +279,7 @@ public class FieldSortBuilder extends SortBuilder { && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) { throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields"); } - IndexFieldData.XFieldComparatorSource fieldComparatorSource = fieldData - .comparatorSource(missing, localSortMode, nested); - SortField field = new SortField(fieldType.name(), fieldComparatorSource, reverse); + SortField field = fieldData.sortField(missing, localSortMode, nested, reverse); return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null)); } } diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 6afefec2c94..e6e6bc82173 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -91,7 +91,6 @@ import static 
org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_INDEX_UUI import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_UPGRADED; import static org.elasticsearch.common.util.set.Sets.newHashSet; @@ -132,7 +131,6 @@ public class RestoreService extends AbstractComponent implements ClusterStateApp unremovable.add(SETTING_NUMBER_OF_REPLICAS); unremovable.add(SETTING_AUTO_EXPAND_REPLICAS); unremovable.add(SETTING_VERSION_UPGRADED); - unremovable.add(SETTING_VERSION_MINIMUM_COMPATIBLE); UNREMOVABLE_SETTINGS = unmodifiableSet(unremovable); } diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index b68037b8dc6..f72956c4202 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -679,14 +679,23 @@ public class ThreadPool extends AbstractComponent implements Closeable { public static boolean terminate(ExecutorService service, long timeout, TimeUnit timeUnit) { if (service != null) { service.shutdown(); - try { - if (service.awaitTermination(timeout, timeUnit)) { - return true; - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } + if (awaitTermination(service, timeout, timeUnit)) return true; service.shutdownNow(); + return awaitTermination(service, timeout, timeUnit); + } + return false; + } + + private static boolean awaitTermination( + final ExecutorService service, + final long timeout, + final TimeUnit timeUnit) { + try { + if (service.awaitTermination(timeout, timeUnit)) { + return true; + } + } catch (final InterruptedException e) { + Thread.currentThread().interrupt(); } return false; } @@ -699,15 +708,10 @@ public class ThreadPool extends AbstractComponent implements Closeable { if (pool != null) { try { pool.shutdown(); - try { - if (pool.awaitTermination(timeout, timeUnit)) { - return true; - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } + if (awaitTermination(pool, timeout, timeUnit)) return true; // last resort pool.shutdownNow(); + return awaitTermination(pool, timeout, timeUnit); } finally { IOUtils.closeWhileHandlingException(pool); } @@ -715,6 +719,20 @@ public class ThreadPool extends AbstractComponent implements Closeable { return false; } + private static boolean awaitTermination( + final ThreadPool pool, + final long timeout, + final TimeUnit timeUnit) { + try { + if (pool.awaitTermination(timeout, timeUnit)) { + return true; + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + return false; + } + @Override public void close() throws IOException { threadContext.close(); diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java index 79b4ff0f9f7..dd75ae29556 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -26,6 +26,8 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.NotifyOnceListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Nullable; @@ -295,19 +297,25 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i DiscoveryNode node = entry.getKey(); NodeChannels channels = entry.getValue(); for (Channel channel : channels.getChannels()) { - try { - sendMessage(channel, pingHeader, successfulPings::inc); - } catch (Exception e) { - if (isOpen(channel)) { - logger.debug( - (Supplier) () -> new ParameterizedMessage("[{}] failed to send ping transport message", node), e); - failedPings.inc(); - } else { - logger.trace( - (Supplier) () -> new ParameterizedMessage( - "[{}] failed to send ping transport message (channel closed)", node), e); + internalSendMessage(channel, pingHeader, new NotifyOnceListener() { + @Override + public void innerOnResponse(Channel channel) { + successfulPings.inc(); } - } + + @Override + public void innerOnFailure(Exception e) { + if (isOpen(channel)) { + logger.debug( + (Supplier) () -> new ParameterizedMessage("[{}] failed to send ping transport message", node), e); + failedPings.inc(); + } else { + logger.trace((Supplier) () -> + new ParameterizedMessage("[{}] failed to send ping transport message (channel closed)", node), e); + } + + } + }); } } } @@ -358,7 +366,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i typeMapping = new EnumMap<>(TransportRequestOptions.Type.class); for (ConnectionProfile.ConnectionTypeHandle handle : connectionProfile.getHandles()) { for (TransportRequestOptions.Type type : handle.getTypes()) - typeMapping.put(type, handle); + typeMapping.put(type, handle); } version = node.getVersion(); } @@ -415,7 +423,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i throw new NodeNotConnectedException(node, "connection already closed"); } Channel channel = channel(options.type()); - sendRequestToChannel(this.node, channel, requestId, action, request, options, getVersion(), (byte)0); + sendRequestToChannel(this.node, channel, requestId, action, request, options, getVersion(), (byte) 0); } } @@ -683,7 +691,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i for (int i = 0; i < hostAddresses.length; i++) { addresses[i] = NetworkAddress.format(hostAddresses[i]); } - logger.debug("binding server bootstrap to: {}", (Object)addresses); + logger.debug("binding server bootstrap to: {}", (Object) addresses); } assert hostAddresses.length > 0; @@ -907,7 +915,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } } - protected void onException(Channel channel, Exception e) throws IOException { + protected void onException(Channel channel, Exception e) { if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources disconnectFromNodeChannel(channel, e); @@ -940,23 +948,27 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } else if (e instanceof TcpTransport.HttpOnTransportException) { // in case we are able to return data, serialize the exception content and sent it back to the client if (isOpen(channel)) { - final Runnable closeChannel = () -> { - try { - closeChannels(Collections.singletonList(channel)); - } catch (IOException e1) { - logger.debug("failed to close 
httpOnTransport channel", e1); + final NotifyOnceListener<Channel> closeChannel = new NotifyOnceListener<Channel>() { + @Override + public void innerOnResponse(Channel channel) { + try { + closeChannels(Collections.singletonList(channel)); + } catch (IOException e1) { + logger.debug("failed to close httpOnTransport channel", e1); + } + } + + @Override + public void innerOnFailure(Exception e) { + try { + closeChannels(Collections.singletonList(channel)); + } catch (IOException e1) { + e.addSuppressed(e1); + logger.debug("failed to close httpOnTransport channel", e1); + } + } }; - boolean success = false; - try { - sendMessage(channel, new BytesArray(e.getMessage().getBytes(StandardCharsets.UTF_8)), closeChannel); - success = true; - } finally { - if (success == false) { - // it's fine to call this more than once - closeChannel.run(); - } - } + internalSendMessage(channel, new BytesArray(e.getMessage().getBytes(StandardCharsets.UTF_8)), closeChannel); } } else { logger.warn( @@ -973,7 +985,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i /** * Binds to the given {@link InetSocketAddress} - * @param name the profile name + * + * @param name the profile name + * @param address the address to bind to */ protected abstract Channel bind(String name, InetSocketAddress address) throws IOException; @@ -983,8 +996,14 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i */ protected abstract void closeChannels(List<Channel> channel) throws IOException; - - protected abstract void sendMessage(Channel channel, BytesReference reference, Runnable sendListener) throws IOException; + /** + * Sends message to channel. The listener's onResponse method will be called when the send is complete unless an exception + * is thrown during the send. If an exception is thrown, the listener's onFailure method will be called. 
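+ * For illustration only, a conforming implementation might follow this sketch (the
+ * {@code write} call is hypothetical and stands in for the transport's actual write):
+ * <pre>
+ * protected void sendMessage(Channel channel, BytesReference reference, ActionListener&lt;Channel&gt; listener) {
+ *     try {
+ *         write(channel, reference);     // transport-specific write of the serialized bytes
+ *         listener.onResponse(channel);  // the send completed
+ *     } catch (Exception e) {
+ *         listener.onFailure(e);         // report the failure instead of throwing
+ *     }
+ * }
+ * </pre>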
+ * @param channel the destination channel + * @param reference the byte reference for the message + * @param listener the listener to call when the operation has completed + */ + protected abstract void sendMessage(Channel channel, BytesReference reference, ActionListener<Channel> listener); protected abstract NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile connectionProfile) throws IOException; @@ -997,8 +1016,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i return compress && (!(request instanceof BytesTransportRequest)); } - private void sendRequestToChannel(DiscoveryNode node, final Channel targetChannel, final long requestId, final String action, - final TransportRequest request, TransportRequestOptions options, Version channelVersion, + private void sendRequestToChannel(final DiscoveryNode node, final Channel targetChannel, final long requestId, final String action, + final TransportRequest request, TransportRequestOptions options, Version channelVersion, byte status) throws IOException, TransportException { if (compress) { @@ -1009,7 +1028,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i // we wrap this in a release once since if the onRequestSent callback throws an exception // we might release things twice and this should be prevented final Releasable toRelease = Releasables.releaseOnce(() -> Releasables.close(bStream.bytes())); - boolean addedReleaseListener = false; StreamOutput stream = bStream; try { // only compress if asked, and, the request is not bytes, since then only @@ -1029,43 +1047,31 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i stream.writeString(action); BytesReference message = buildMessage(requestId, status, node.getVersion(), request, stream, bStream); final TransportRequestOptions finalOptions = options; - Runnable onRequestSent = () -> { // this might be called in a different thread - try { - toRelease.close(); - } finally { - transportServiceAdapter.onRequestSent(node, requestId, action, request, finalOptions); - } - }; - addedReleaseListener = internalSendMessage(targetChannel, message, onRequestSent); + // this might be called in a different thread + SendListener onRequestSent = new SendListener(toRelease, + () -> transportServiceAdapter.onRequestSent(node, requestId, action, request, finalOptions)); + internalSendMessage(targetChannel, message, onRequestSent); } finally { IOUtils.close(stream); - if (!addedReleaseListener) { - toRelease.close(); - } } } /** - * sends a message view the given channel, using the given callbacks. - * - * @return true if the message was successfully sent or false when an error occurred and the error hanlding logic was activated - * + * sends a message to the given channel, using the given callbacks. 
*/ - private boolean internalSendMessage(Channel targetChannel, BytesReference message, Runnable onRequestSent) throws IOException { - boolean success; + private void internalSendMessage(Channel targetChannel, BytesReference message, NotifyOnceListener listener) { try { - sendMessage(targetChannel, message, onRequestSent); - success = true; - } catch (IOException ex) { - // passing exception handling to deal with this and raise disconnect events and decide the right logging level + sendMessage(targetChannel, message, listener); + } catch (Exception ex) { + // call listener to ensure that any resources are released + listener.onFailure(ex); onException(targetChannel, ex); - success = false; } - return success; } /** * Sends back an error response to the caller via the given channel + * * @param nodeVersion the caller node version * @param channel the channel to send the response to * @param error the error to return @@ -1085,8 +1091,9 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i status = TransportStatus.setError(status); final BytesReference bytes = stream.bytes(); final BytesReference header = buildHeader(requestId, status, nodeVersion, bytes.length()); - Runnable onRequestSent = () -> transportServiceAdapter.onResponseSent(requestId, action, error); - sendMessage(channel, new CompositeBytesReference(header, bytes), onRequestSent); + SendListener onResponseSent = new SendListener(null, + () -> transportServiceAdapter.onResponseSent(requestId, action, error)); + internalSendMessage(channel, new CompositeBytesReference(header, bytes), onResponseSent); } } @@ -1097,7 +1104,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i */ public void sendResponse(Version nodeVersion, Channel channel, final TransportResponse response, final long requestId, final String action, TransportResponseOptions options) throws IOException { - sendResponse(nodeVersion, channel, response, requestId, action, options, (byte)0); + sendResponse(nodeVersion, channel, response, requestId, action, options, (byte) 0); } private void sendResponse(Version nodeVersion, Channel channel, final TransportResponse response, final long requestId, @@ -1110,7 +1117,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i // we wrap this in a release once since if the onRequestSent callback throws an exception // we might release things twice and this should be prevented final Releasable toRelease = Releasables.releaseOnce(() -> Releasables.close(bStream.bytes())); - boolean addedReleaseListener = false; StreamOutput stream = bStream; try { if (options.compress()) { @@ -1122,24 +1128,12 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i BytesReference reference = buildMessage(requestId, status, nodeVersion, response, stream, bStream); final TransportResponseOptions finalOptions = options; - Runnable onRequestSent = () -> { // this might be called in a different thread - try { - toRelease.close(); - } finally { - transportServiceAdapter.onResponseSent(requestId, action, response, finalOptions); - } - }; - addedReleaseListener = internalSendMessage(channel, reference, onRequestSent); + // this might be called in a different thread + SendListener listener = new SendListener(toRelease, + () -> transportServiceAdapter.onResponseSent(requestId, action, response, finalOptions)); + internalSendMessage(channel, reference, listener); } finally { - try { - IOUtils.close(stream); - } finally { - if (!addedReleaseListener) { - - toRelease.close(); - } 
- } - + IOUtils.close(stream); } } @@ -1242,7 +1236,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i // safety against too large frames being sent if (dataLen > NINETY_PER_HEAP_SIZE) { throw new IllegalArgumentException("transport content length received [" + new ByteSizeValue(dataLen) + "] exceeded [" - + new ByteSizeValue(NINETY_PER_HEAP_SIZE) + "]"); + + new ByteSizeValue(NINETY_PER_HEAP_SIZE) + "]"); } if (buffer.length() < dataLen + sizeHeaderLength) { @@ -1254,7 +1248,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i private static boolean bufferStartsWith(BytesReference buffer, int offset, String method) { char[] chars = method.toCharArray(); for (int i = 0; i < chars.length; i++) { - if (buffer.get(offset+ i) != chars[i]) { + if (buffer.get(offset + i) != chars[i]) { return false; } } @@ -1277,7 +1271,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i return RestStatus.BAD_REQUEST; } - public HttpOnTransportException(StreamInput in) throws IOException{ + public HttpOnTransportException(StreamInput in) throws IOException { super(in); } } @@ -1383,7 +1377,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i @Override protected void doRun() throws Exception { handler.handleResponse(response); - }}); + } + }); } @@ -1423,7 +1418,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i if (TransportStatus.isHandshake(status)) { final VersionHandshakeResponse response = new VersionHandshakeResponse(getCurrentVersion()); sendResponse(version, channel, response, requestId, HANDSHAKE_ACTION_NAME, TransportResponseOptions.EMPTY, - TransportStatus.setHandshake((byte)0)); + TransportStatus.setHandshake((byte) 0)); } else { final RequestHandlerRegistry reg = transportServiceAdapter.getRequestHandler(action); if (reg == null) { @@ -1552,7 +1547,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i // to as the payload. 
final Version minCompatVersion = getCurrentVersion().minimumCompatibilityVersion(); sendRequestToChannel(node, channel, requestId, HANDSHAKE_ACTION_NAME, TransportRequest.Empty.INSTANCE, - TransportRequestOptions.EMPTY, minCompatVersion, TransportStatus.setHandshake((byte)0)); + TransportRequestOptions.EMPTY, minCompatVersion, TransportStatus.setHandshake((byte) 0)); if (handler.latch.await(timeout.millis(), TimeUnit.MILLISECONDS) == false) { throw new ConnectTransportException(node, "handshake_timeout[" + timeout + "]"); } @@ -1594,7 +1589,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i protected final void onChannelClosed(Channel channel) { final Optional first = pendingHandshakes.entrySet().stream() .filter((entry) -> entry.getValue().channel == channel).map((e) -> e.getKey()).findFirst(); - if(first.isPresent()) { + if (first.isPresent()) { final Long requestId = first.get(); final HandshakeResponseHandler handler = pendingHandshakes.remove(requestId); if (handler != null) { @@ -1607,6 +1602,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i /** * Ensures this transport is still started / open + * * @throws IllegalStateException if the transport is not started / open */ protected final void ensureOpen() { @@ -1614,4 +1610,28 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i throw new IllegalStateException("transport has been stopped"); } } + + private final class SendListener extends NotifyOnceListener { + private final Releasable optionalReleasable; + private final Runnable transportAdaptorCallback; + + private SendListener(Releasable optionalReleasable, Runnable transportAdaptorCallback) { + this.optionalReleasable = optionalReleasable; + this.transportAdaptorCallback = transportAdaptorCallback; + } + + @Override + public void innerOnResponse(Channel channel) { + release(); + } + + @Override + public void innerOnFailure(Exception e) { + release(); + } + + private void release() { + Releasables.close(optionalReleasable, transportAdaptorCallback::run); + } + } } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java b/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java index 30b7299f59d..502ab51e0fa 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java @@ -134,11 +134,12 @@ public final class TransportActionProxy { true, false, new ProxyRequestHandler<>(service, action, responseSupplier)); } + private static final String PROXY_ACTION_PREFIX = "internal:transport/proxy/"; /** * Returns the corresponding proxy action for the given action */ public static String getProxyAction(String action) { - return "internal:transport/proxy/" + action; + return PROXY_ACTION_PREFIX + action; } /** @@ -147,4 +148,14 @@ public final class TransportActionProxy { public static TransportRequest wrapRequest(DiscoveryNode node, TransportRequest request) { return new ProxyRequest<>(request, node); } + + /** + * Unwraps a proxy request and returns the original request + */ + public static TransportRequest unwrapRequest(TransportRequest request) { + if (request instanceof ProxyRequest) { + return ((ProxyRequest)request).wrapped; + } + return request; + } } diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 4b393c0d6e7..5c9c56de182 100644 --- 
a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.1.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-6.5.0-snapshot-d00c5ca.jar}" { +grant codeBase "${codebase.lucene-core-6.5.0.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-6.5.0-snapshot-d00c5ca.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.lucene-misc-6.5.0-snapshot-d00c5ca.jar}" { +grant codeBase "${codebase.lucene-misc-6.5.0.jar}" { // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper permission java.nio.file.LinkPermission "hard"; }; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 8769c80b84e..2c39ccb350e 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-6.5.0-snapshot-d00c5ca.jar}" { +grant codeBase "${codebase.lucene-test-framework-6.5.0.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed for testing hardlinks in StoreRecoveryTests since we install MockFS diff --git a/core/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java b/core/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java index 1aff2af5235..aef354a0495 100644 --- a/core/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/core/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -198,7 +198,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase { subSearcher.search(weight, c); shardHits[shardIDX] = c.getTopDocs(); } - CollapseTopFieldDocs mergedFieldDocs = CollapseTopFieldDocs.merge(sort, 0, expectedNumGroups, shardHits); + CollapseTopFieldDocs mergedFieldDocs = CollapseTopFieldDocs.merge(sort, 0, expectedNumGroups, shardHits, true); assertTopDocsEquals(mergedFieldDocs, collapseTopFieldDocs); w.close(); reader.close(); diff --git a/core/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java b/core/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java index 0cf62e8ce6c..69e41962cc5 100644 --- a/core/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java +++ b/core/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java @@ -43,9 +43,9 @@ public class BoundedBreakIteratorScannerTests extends ESTestCase { String[] vocabulary = new String[maxSize]; for (int i = 0; i < maxSize; i++) { if (rarely()) { - vocabulary[i] = randomAsciiOfLengthBetween(50, 200); + vocabulary[i] = randomAlphaOfLengthBetween(50, 200); } else { - vocabulary[i] = randomAsciiOfLengthBetween(1, 30); + vocabulary[i] = 
randomAlphaOfLengthBetween(1, 30); } } diff --git a/core/src/test/java/org/elasticsearch/BuildTests.java b/core/src/test/java/org/elasticsearch/BuildTests.java index 26e332a602a..ac5496856f9 100644 --- a/core/src/test/java/org/elasticsearch/BuildTests.java +++ b/core/src/test/java/org/elasticsearch/BuildTests.java @@ -44,7 +44,7 @@ public class BuildTests extends ESTestCase { assertEquals(build, another); assertEquals(build.hashCode(), another.hashCode()); - Build differentHash = new Build(randomAsciiOfLengthBetween(3, 10), build.date(), build.isSnapshot()); + Build differentHash = new Build(randomAlphaOfLengthBetween(3, 10), build.date(), build.isSnapshot()); assertNotEquals(build, differentHash); Build differentDate = new Build(build.shortHash(), "1970-01-01", build.isSnapshot()); diff --git a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 5f0c0d063b3..8b4f65ab491 100644 --- a/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/core/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -918,7 +918,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { int nbValues = randomIntBetween(1, 3); for (int j = 0; j < nbValues; j++) { - values.add(frequently() ? randomAsciiOfLength(5) : ""); + values.add(frequently() ? randomAlphaOfLength(5) : ""); } randomHeaders.put("header_" + i, values); } @@ -943,7 +943,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { int nbValues = randomIntBetween(1, 3); for (int j = 0; j < nbValues; j++) { - values.add(frequently() ? randomAsciiOfLength(5) : ""); + values.add(frequently() ? randomAlphaOfLength(5) : ""); } randomMetadata.put("es.metadata_" + i, values); } @@ -965,7 +965,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { String resourceType = "type_" + i; String[] resourceIds = new String[randomIntBetween(1, 3)]; for (int j = 0; j < resourceIds.length; j++) { - resourceIds[j] = frequently() ? randomAsciiOfLength(5) : ""; + resourceIds[j] = frequently() ? 
randomAlphaOfLength(5) : ""; } actualException.setResources(resourceType, resourceIds); expected.setResources(resourceType, resourceIds); diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 2cd87c0470e..c0430001bb8 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; @@ -645,8 +646,8 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testNoLongerPrimaryShardException() throws IOException { - ShardId shardId = new ShardId(new Index(randomAsciiOfLength(4), randomAsciiOfLength(4)), randomIntBetween(0, Integer.MAX_VALUE)); - String msg = randomAsciiOfLength(4); + ShardId shardId = new ShardId(new Index(randomAlphaOfLength(4), randomAlphaOfLength(4)), randomIntBetween(0, Integer.MAX_VALUE)); + String msg = randomAlphaOfLength(4); ShardStateAction.NoLongerPrimaryShardException ex = serialize(new ShardStateAction.NoLongerPrimaryShardException(shardId, msg)); assertEquals(shardId, ex.getShardId()); assertEquals(msg, ex.getMessage()); @@ -680,15 +681,15 @@ public class ExceptionSerializationTests extends ESTestCase { } public void testThatIdsArePositive() { - for (ElasticsearchException.ElasticsearchExceptionHandle handle : ElasticsearchException.ElasticsearchExceptionHandle.values()) { - assertThat("negative id", handle.id, greaterThanOrEqualTo(0)); + for (final int id : ElasticsearchException.ids()) { + assertThat("negative id", id, greaterThanOrEqualTo(0)); } } public void testThatIdsAreUnique() { - Set<Integer> ids = new HashSet<>(); - for (ElasticsearchException.ElasticsearchExceptionHandle handle : ElasticsearchException.ElasticsearchExceptionHandle.values()) { - assertTrue("duplicate id", ids.add(handle.id)); + final Set<Integer> ids = new HashSet<>(); + for (final int id: ElasticsearchException.ids()) { + assertTrue("duplicate id", ids.add(id)); } } @@ -848,8 +849,9 @@ public class ExceptionSerializationTests extends ESTestCase { } } - for (ElasticsearchException.ElasticsearchExceptionHandle handle : ElasticsearchException.ElasticsearchExceptionHandle.values()) { - assertEquals((int) reverse.get(handle.exceptionClass), handle.id); + for (final Tuple<Integer, Class<? extends ElasticsearchException>> tuple : ElasticsearchException.classes()) { + assertNotNull(tuple.v1()); + assertEquals((int) reverse.get(tuple.v2()), (int)tuple.v1()); } for (Map.Entry<Integer, Class<? extends ElasticsearchException>> entry : ids.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/action/NotifyOnceListenerTests.java b/core/src/test/java/org/elasticsearch/action/NotifyOnceListenerTests.java new file mode 100644 index 00000000000..501a40686ed --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/NotifyOnceListenerTests.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicReference; + +public class NotifyOnceListenerTests extends ESTestCase { + + public void testWhenSuccessCannotNotifyMultipleTimes() { + AtomicReference<String> response = new AtomicReference<>(); + AtomicReference<Exception> exception = new AtomicReference<>(); + + NotifyOnceListener<String> listener = new NotifyOnceListener<String>() { + @Override + public void innerOnResponse(String s) { + response.set(s); + } + + @Override + public void innerOnFailure(Exception e) { + exception.set(e); + } + }; + + listener.onResponse("response"); + listener.onResponse("wrong-response"); + listener.onFailure(new RuntimeException()); + + assertNull(exception.get()); + assertEquals("response", response.get()); + } + + public void testWhenErrorCannotNotifyMultipleTimes() { + AtomicReference<String> response = new AtomicReference<>(); + AtomicReference<Exception> exception = new AtomicReference<>(); + + NotifyOnceListener<String> listener = new NotifyOnceListener<String>() { + @Override + public void innerOnResponse(String s) { + response.set(s); + } + + @Override + public void innerOnFailure(Exception e) { + exception.set(e); + } + }; + + RuntimeException expected = new RuntimeException(); + listener.onFailure(expected); + listener.onFailure(new IllegalArgumentException()); + listener.onResponse("response"); + + assertNull(response.get()); + assertSame(expected, exception.get()); + } +}
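Editor's note: the two tests above pin down the once-only contract, but the patch does not show NotifyOnceListener itself. A common way to implement the behaviour they assert is a compare-and-set guard; this stand-in is a sketch under that assumption, not the Elasticsearch source:

import java.util.concurrent.atomic.AtomicBoolean;

abstract class OnceListenerSketch<Response> {
    private final AtomicBoolean hasBeenCalled = new AtomicBoolean(false);

    protected abstract void innerOnResponse(Response response);

    protected abstract void innerOnFailure(Exception e);

    // Whichever of onResponse/onFailure wins the compare-and-set delivers the
    // notification; every later call is a no-op, matching the assertions above.
    public final void onResponse(Response response) {
        if (hasBeenCalled.compareAndSet(false, true)) {
            innerOnResponse(response);
        }
    }

    public final void onFailure(Exception e) {
        if (hasBeenCalled.compareAndSet(false, true)) {
            innerOnFailure(e);
        }
    }
}

diff --git a/core/src/test/java/org/elasticsearch/action/OriginalIndicesTests.java b/core/src/test/java/org/elasticsearch/action/OriginalIndicesTests.java index ceba9278a16..c82f184eff4 100644 --- a/core/src/test/java/org/elasticsearch/action/OriginalIndicesTests.java +++ b/core/src/test/java/org/elasticsearch/action/OriginalIndicesTests.java @@ -57,7 +57,7 @@ public class OriginalIndicesTests extends ESTestCase { int numIndices = randomInt(10); String[] indices = new String[numIndices]; for (int j = 0; j < indices.length; j++) { - indices[j] = randomAsciiOfLength(randomIntBetween(1, 10)); + indices[j] = randomAlphaOfLength(randomIntBetween(1, 10)); } IndicesOptions indicesOptions = randomFrom(indicesOptionsValues); return new OriginalIndices(indices, indicesOptions); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java index 4067eb1b1ed..0c47caa5466 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java @@ -26,8 +26,8 @@ public class ClusterAllocationExplainRequestTests extends ESTestCase { public void testSerialization() throws Exception { ClusterAllocationExplainRequest request = - new 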
ClusterAllocationExplainRequest(randomAsciiOfLength(4), randomIntBetween(0, Integer.MAX_VALUE), randomBoolean(), - randomBoolean() ? randomAsciiOfLength(5) : null); + new ClusterAllocationExplainRequest(randomAlphaOfLength(4), randomIntBetween(0, Integer.MAX_VALUE), randomBoolean(), + randomBoolean() ? randomAlphaOfLength(5) : null); request.includeYesDecisions(randomBoolean()); request.includeDiskInfo(randomBoolean()); BytesStreamOutput output = new BytesStreamOutput(); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index d3d156ee2cd..0b6b14684f9 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -290,9 +290,9 @@ public class NodeStatsTests extends ESTestCase { new OsStats.Mem(randomLong(), randomLong()), new OsStats.Swap(randomLong(), randomLong()), new OsStats.Cgroup( - randomAsciiOfLength(8), + randomAlphaOfLength(8), randomNonNegativeLong(), - randomAsciiOfLength(8), + randomAlphaOfLength(8), randomNonNegativeLong(), randomNonNegativeLong(), new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()))); @@ -310,14 +310,14 @@ public class NodeStatsTests extends ESTestCase { int numMemoryPools = randomIntBetween(0, 10); List<JvmStats.MemoryPool> memoryPools = new ArrayList<>(numMemoryPools); for (int i = 0; i < numMemoryPools; i++) { - memoryPools.add(new JvmStats.MemoryPool(randomAsciiOfLengthBetween(3, 10), randomNonNegativeLong(), + memoryPools.add(new JvmStats.MemoryPool(randomAlphaOfLengthBetween(3, 10), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())); } JvmStats.Threads threads = new JvmStats.Threads(randomIntBetween(1, 1000), randomIntBetween(1, 1000)); int numGarbageCollectors = randomIntBetween(0, 10); JvmStats.GarbageCollector[] garbageCollectorsArray = new JvmStats.GarbageCollector[numGarbageCollectors]; for (int i = 0; i < numGarbageCollectors; i++) { - garbageCollectorsArray[i] = new JvmStats.GarbageCollector(randomAsciiOfLengthBetween(3, 10), + garbageCollectorsArray[i] = new JvmStats.GarbageCollector(randomAlphaOfLengthBetween(3, 10), randomNonNegativeLong(), randomNonNegativeLong()); } JvmStats.GarbageCollectors garbageCollectors = new JvmStats.GarbageCollectors(garbageCollectorsArray); @@ -326,7 +326,7 @@ public class NodeStatsTests extends ESTestCase { for (int i = 0; i < numBufferPools; i++) { bufferPoolList.add( new JvmStats.BufferPool( - randomAsciiOfLengthBetween(3, 10), + randomAlphaOfLengthBetween(3, 10), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())); @@ -342,7 +342,7 @@ int numThreadPoolStats = randomIntBetween(0, 10); List<ThreadPoolStats.Stats> threadPoolStatsList = new ArrayList<>(); for (int i = 0; i < numThreadPoolStats; i++) { - threadPoolStatsList.add(new ThreadPoolStats.Stats(randomAsciiOfLengthBetween(3, 10), randomIntBetween(1, 1000), + threadPoolStatsList.add(new ThreadPoolStats.Stats(randomAlphaOfLengthBetween(3, 10), randomIntBetween(1, 1000), randomIntBetween(1, 1000), randomIntBetween(1, 1000), randomNonNegativeLong(), randomIntBetween(1, 1000), randomIntBetween(1, 1000))); } @@ -354,17 +354,17 @@ FsInfo.DeviceStats[] deviceStatsArray = new FsInfo.DeviceStats[numDeviceStats]; for 
(int i = 0; i < numDeviceStats; i++) { FsInfo.DeviceStats previousDeviceStats = randomBoolean() ? null : - new FsInfo.DeviceStats(randomInt(), randomInt(), randomAsciiOfLengthBetween(3, 10), + new FsInfo.DeviceStats(randomInt(), randomInt(), randomAlphaOfLengthBetween(3, 10), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), null); deviceStatsArray[i] = - new FsInfo.DeviceStats(randomInt(), randomInt(), randomAsciiOfLengthBetween(3, 10), randomNonNegativeLong(), + new FsInfo.DeviceStats(randomInt(), randomInt(), randomAlphaOfLengthBetween(3, 10), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), previousDeviceStats); } FsInfo.IoStats ioStats = new FsInfo.IoStats(deviceStatsArray); int numPaths = randomIntBetween(0, 10); FsInfo.Path[] paths = new FsInfo.Path[numPaths]; for (int i = 0; i < numPaths; i++) { - paths[i] = new FsInfo.Path(randomAsciiOfLengthBetween(3, 10), randomBoolean() ? randomAsciiOfLengthBetween(3, 10) : null, + paths[i] = new FsInfo.Path(randomAlphaOfLengthBetween(3, 10), randomBoolean() ? randomAlphaOfLengthBetween(3, 10) : null, randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); } fsInfo = new FsInfo(randomNonNegativeLong(), ioStats, paths); @@ -377,7 +377,7 @@ public class NodeStatsTests extends ESTestCase { int numCircuitBreakerStats = randomIntBetween(0, 10); CircuitBreakerStats[] circuitBreakerStatsArray = new CircuitBreakerStats[numCircuitBreakerStats]; for (int i = 0; i < numCircuitBreakerStats; i++) { - circuitBreakerStatsArray[i] = new CircuitBreakerStats(randomAsciiOfLengthBetween(3, 10), randomNonNegativeLong(), + circuitBreakerStatsArray[i] = new CircuitBreakerStats(randomAlphaOfLengthBetween(3, 10), randomNonNegativeLong(), randomNonNegativeLong(), randomDouble(), randomNonNegativeLong()); } allCircuitBreakerStats = new AllCircuitBreakerStats(circuitBreakerStatsArray); @@ -393,7 +393,7 @@ public class NodeStatsTests extends ESTestCase { int numStatsPerPipeline = randomIntBetween(0, 10); Map<String, IngestStats.Stats> statsPerPipeline = new HashMap<>(); for (int i = 0; i < numStatsPerPipeline; i++) { - statsPerPipeline.put(randomAsciiOfLengthBetween(3, 10), new IngestStats.Stats(randomNonNegativeLong(), + statsPerPipeline.put(randomAlphaOfLengthBetween(3, 10), new IngestStats.Stats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())); } ingestStats = new IngestStats(totalStats, statsPerPipeline); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskTests.java index d3f10f2b038..c5d8b39c3da 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskTests.java @@ -30,7 +30,7 @@ import java.util.Map; public class TaskTests extends ESTestCase { public void testTaskInfoToString() { - String nodeId = randomAsciiOfLength(10); + String nodeId = randomAlphaOfLength(10); long taskId = randomIntBetween(0, 100000); long startTime = randomNonNegativeLong(); long runningTime = randomNonNegativeLong(); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index 4b531267d3a..77b5ccc09ab 100644 --- 
a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -59,16 +59,16 @@ public class ClusterRerouteRequestTests extends ESTestCase { private static final int ROUNDS = 30; private final List<Supplier<AllocationCommand>> RANDOM_COMMAND_GENERATORS = unmodifiableList( Arrays.<Supplier<AllocationCommand>> asList( - () -> new AllocateReplicaAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), - randomAsciiOfLengthBetween(2, 10)), - () -> new AllocateEmptyPrimaryAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), - randomAsciiOfLengthBetween(2, 10), randomBoolean()), - () -> new AllocateStalePrimaryAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), - randomAsciiOfLengthBetween(2, 10), randomBoolean()), - () -> new CancelAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), - randomAsciiOfLengthBetween(2, 10), randomBoolean()), - () -> new MoveAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), - randomAsciiOfLengthBetween(2, 10), randomAsciiOfLengthBetween(2, 10)))); + () -> new AllocateReplicaAllocationCommand(randomAlphaOfLengthBetween(2, 10), between(0, 1000), + randomAlphaOfLengthBetween(2, 10)), + () -> new AllocateEmptyPrimaryAllocationCommand(randomAlphaOfLengthBetween(2, 10), between(0, 1000), + randomAlphaOfLengthBetween(2, 10), randomBoolean()), + () -> new AllocateStalePrimaryAllocationCommand(randomAlphaOfLengthBetween(2, 10), between(0, 1000), + randomAlphaOfLengthBetween(2, 10), randomBoolean()), + () -> new CancelAllocationCommand(randomAlphaOfLengthBetween(2, 10), between(0, 1000), + randomAlphaOfLengthBetween(2, 10), randomBoolean()), + () -> new MoveAllocationCommand(randomAlphaOfLengthBetween(2, 10), between(0, 1000), + randomAlphaOfLengthBetween(2, 10), randomAlphaOfLengthBetween(2, 10)))); private final NamedWriteableRegistry namedWriteableRegistry; public ClusterRerouteRequestTests() { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java index f5fa046f4c2..e21635596b9 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java @@ -34,7 +34,7 @@ public class ClusterSearchShardsRequestTests extends ESTestCase { int numIndices = randomIntBetween(1, 5); String[] indices = new String[numIndices]; for (int i = 0; i < numIndices; i++) { - indices[i] = randomAsciiOfLengthBetween(3, 10); + indices[i] = randomAlphaOfLengthBetween(3, 10); } request.indices(indices); } @@ -43,13 +43,13 @@ public class ClusterSearchShardsRequestTests extends ESTestCase { IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); } if (randomBoolean()) { - request.preference(randomAsciiOfLengthBetween(3, 10)); + request.preference(randomAlphaOfLengthBetween(3, 10)); } if (randomBoolean()) { int numRoutings = randomIntBetween(1, 3); String[] routings = new String[numRoutings]; for (int i = 0; i < numRoutings; i++) { - routings[i] = randomAsciiOfLengthBetween(3, 10); + routings[i] = randomAlphaOfLengthBetween(3, 10); } request.routing(routings); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java 
b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java index 9c5b1ae8944..5181e943c2d 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java @@ -54,9 +54,9 @@ public class ClusterSearchShardsResponseTests extends ESTestCase { int numShards = randomIntBetween(1, 10); ClusterSearchShardsGroup[] clusterSearchShardsGroups = new ClusterSearchShardsGroup[numShards]; for (int i = 0; i < numShards; i++) { - String index = randomAsciiOfLengthBetween(3, 10); - ShardId shardId = new ShardId(index, randomAsciiOfLength(12), i); - String nodeId = randomAsciiOfLength(10); + String index = randomAlphaOfLengthBetween(3, 10); + ShardId shardId = new ShardId(index, randomAlphaOfLength(12), i); + String nodeId = randomAlphaOfLength(10); ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, nodeId, randomBoolean(), ShardRoutingState.STARTED); clusterSearchShardsGroups[i] = new ClusterSearchShardsGroup(shardId, new ShardRouting[]{shardRouting}); DiscoveryNode node = new DiscoveryNode(shardRouting.currentNodeId(), diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java new file mode 100644 index 00000000000..04edc775a2d --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodesTests.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.stats; + +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.List; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; + +public class ClusterStatsNodesTests extends ESTestCase { + + /** + * Test that empty transport/http types are not printed out as part + * of the cluster stats xcontent output. 
+ */ + public void testNetworkTypesToXContent() throws Exception { + ClusterStatsNodes.NetworkTypes stats = new ClusterStatsNodes.NetworkTypes(emptyList()); + assertEquals("{\"transport_types\":{},\"http_types\":{}}", + toXContent(stats, XContentType.JSON, randomBoolean()).utf8ToString()); + + List<NodeInfo> nodeInfos = singletonList(createNodeInfo("node_0", null, null)); + stats = new ClusterStatsNodes.NetworkTypes(nodeInfos); + assertEquals("{\"transport_types\":{},\"http_types\":{}}", + toXContent(stats, XContentType.JSON, randomBoolean()).utf8ToString()); + + nodeInfos = Arrays.asList(createNodeInfo("node_1", "", ""), + createNodeInfo("node_2", "custom", "custom"), + createNodeInfo("node_3", null, "custom")); + stats = new ClusterStatsNodes.NetworkTypes(nodeInfos); + assertEquals("{" + + "\"transport_types\":{\"custom\":1}," + + "\"http_types\":{\"custom\":2}" + + "}", toXContent(stats, XContentType.JSON, randomBoolean()).utf8ToString()); + } + + private static NodeInfo createNodeInfo(String nodeId, String transportType, String httpType) { + Settings.Builder settings = Settings.builder(); + if (transportType != null) { + settings.put(randomFrom(NetworkModule.TRANSPORT_TYPE_KEY, + NetworkModule.TRANSPORT_TYPE_DEFAULT_KEY), transportType); + } + if (httpType != null) { + settings.put(randomFrom(NetworkModule.HTTP_TYPE_KEY, + NetworkModule.HTTP_TYPE_DEFAULT_KEY), httpType); + } + return new NodeInfo(null, null, + new DiscoveryNode(nodeId, buildNewFakeTransportAddress(), null), + settings.build(), null, null, null, null, null, null, null, null, null); + } +}
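Editor's note: the counting rule that testNetworkTypesToXContent exercises is, per node: take the explicit type key, fall back to the default key, skip unset or empty values, and tally the rest. A plain-Java sketch of that rule (Map stand-ins instead of Settings/NodeInfo; an illustration, not the production code):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class NetworkTypeCountsSketch {
    // nodeSettings: one map per node; typeKey/defaultTypeKey play the role of
    // NetworkModule.TRANSPORT_TYPE_KEY / TRANSPORT_TYPE_DEFAULT_KEY.
    static Map<String, Integer> countTypes(List<Map<String, String>> nodeSettings,
                                           String typeKey, String defaultTypeKey) {
        Map<String, Integer> counts = new HashMap<>();
        for (Map<String, String> settings : nodeSettings) {
            String type = settings.getOrDefault(typeKey, settings.get(defaultTypeKey));
            if (type == null || type.isEmpty()) {
                continue; // unset or empty types yield the empty {} buckets asserted above
            }
            counts.merge(type, 1, Integer::sum);
        }
        return counts;
    }
}

diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java index 8cd3f271b7b..4a4aa736332 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java @@ -47,9 +47,9 @@ public class AliasActionsTests extends ESTestCase { assertEquals("One of [index] or [indices] is required", e.getMessage()); } else { Exception e = expectThrows(IllegalArgumentException.class, - () -> new AliasActions(type).alias(randomAsciiOfLength(5)).validate()); + () -> new AliasActions(type).alias(randomAlphaOfLength(5)).validate()); assertEquals("One of [index] or [indices] is required", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).index(randomAsciiOfLength(5)).validate()); + e = expectThrows(IllegalArgumentException.class, () -> new AliasActions(type).index(randomAlphaOfLength(5)).validate()); assertEquals("One of [alias] or [aliases] is required", e.getMessage()); } } @@ -156,8 +156,8 @@ public class AliasActionsTests extends ESTestCase { } public void testParseAddDefaultRouting() throws IOException { - String index = randomAsciiOfLength(5); - String alias = randomAsciiOfLength(5); + String index = randomAlphaOfLength(5); + String alias = randomAlphaOfLength(5); Object searchRouting = randomRouting(); Object indexRouting = randomRouting(); XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent()); @@ -217,7 +217,7 @@ public class AliasActionsTests extends ESTestCase { } public void testParseRemoveIndex() throws IOException { - String[] indices = randomBoolean() ? new String[] {randomAsciiOfLength(5)} : generateRandomStringArray(10, 5, false, false); + String[] indices = randomBoolean() ? 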
new String[] {randomAlphaOfLength(5)} : generateRandomStringArray(10, 5, false, false); XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent()); b.startObject(); { b.startObject("remove_index"); { @@ -243,9 +243,9 @@ public class AliasActionsTests extends ESTestCase { XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent()); b.startObject(); { b.startObject(randomFrom("add", "remove")); { - b.field("index", randomAsciiOfLength(5)); + b.field("index", randomAlphaOfLength(5)); b.array("indices", generateRandomStringArray(10, 5, false, false)); - b.field("alias", randomAsciiOfLength(5)); + b.field("alias", randomAlphaOfLength(5)); } b.endObject(); } @@ -261,8 +261,8 @@ public class AliasActionsTests extends ESTestCase { XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent()); b.startObject(); { b.startObject(randomFrom("add", "remove")); { - b.field("index", randomAsciiOfLength(5)); - b.field("alias", randomAsciiOfLength(5)); + b.field("index", randomAlphaOfLength(5)); + b.field("alias", randomAlphaOfLength(5)); b.array("aliases", generateRandomStringArray(10, 5, false, false)); } b.endObject(); @@ -278,27 +278,27 @@ public class AliasActionsTests extends ESTestCase { public void testRoundTrip() throws IOException { AliasActions action = new AliasActions(randomFrom(AliasActions.Type.values())); if (randomBoolean()) { - action.index(randomAsciiOfLength(5)); + action.index(randomAlphaOfLength(5)); } else { action.indices(generateRandomStringArray(5, 5, false, false)); } if (action.actionType() != AliasActions.Type.REMOVE_INDEX) { if (randomBoolean()) { - action.alias(randomAsciiOfLength(5)); + action.alias(randomAlphaOfLength(5)); } else { action.aliases(generateRandomStringArray(5, 5, false, false)); } } if (action.actionType() == AliasActions.Type.ADD) { if (randomBoolean()) { - action.filter(randomAsciiOfLength(10)); + action.filter(randomAlphaOfLength(10)); } if (randomBoolean()) { if (randomBoolean()) { - action.routing(randomAsciiOfLength(5)); + action.routing(randomAlphaOfLength(5)); } else { - action.searchRouting(randomAsciiOfLength(5)); - action.indexRouting(randomAsciiOfLength(5)); + action.searchRouting(randomAlphaOfLength(5)); + action.indexRouting(randomAlphaOfLength(5)); } } } @@ -322,11 +322,11 @@ public class AliasActionsTests extends ESTestCase { if (maxDepth > 0) { value = randomMap(maxDepth - 1); } else { - value = randomAsciiOfLength(5); + value = randomAlphaOfLength(5); } break; case 1: - value = randomAsciiOfLength(5); + value = randomAlphaOfLength(5); break; case 2: value = randomBoolean(); @@ -337,12 +337,12 @@ public class AliasActionsTests extends ESTestCase { default: throw new UnsupportedOperationException(); } - result.put(randomAsciiOfLength(5), value); + result.put(randomAlphaOfLength(5), value); } return result; } private Object randomRouting() { - return randomBoolean() ? randomAsciiOfLength(5) : randomInt(); + return randomBoolean() ? 
randomAlphaOfLength(5) : randomInt(); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 213616eec68..920ba2e9715 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.equalTo; public class RolloverRequestTests extends ESTestCase { public void testConditionsParsing() throws Exception { - final RolloverRequest request = new RolloverRequest(randomAsciiOfLength(10), randomAsciiOfLength(10)); + final RolloverRequest request = new RolloverRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); final XContentBuilder builder = XContentFactory.jsonBuilder() .startObject() .startObject("conditions") @@ -56,7 +56,7 @@ public class RolloverRequestTests extends ESTestCase { } public void testParsingWithIndexSettings() throws Exception { - final RolloverRequest request = new RolloverRequest(randomAsciiOfLength(10), randomAsciiOfLength(10)); + final RolloverRequest request = new RolloverRequest(randomAlphaOfLength(10), randomAlphaOfLength(10)); final XContentBuilder builder = XContentFactory.jsonBuilder() .startObject() .startObject("conditions") diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 9e80e92a281..9d62bd825f3 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -34,9 +34,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; import java.util.HashSet; import java.util.List; @@ -60,7 +57,7 @@ public class TransportRolloverActionTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); - final IndexMetaData metaData = IndexMetaData.builder(randomAsciiOfLength(10)) + final IndexMetaData metaData = IndexMetaData.builder(randomAlphaOfLength(10)) .creationDate(System.currentTimeMillis() - TimeValue.timeValueHours(3).getMillis()) .settings(settings) .build(); @@ -95,9 +92,9 @@ public class TransportRolloverActionTests extends ESTestCase { } public void testCreateUpdateAliasRequest() throws Exception { - String sourceAlias = randomAsciiOfLength(10); - String sourceIndex = randomAsciiOfLength(10); - String targetIndex = randomAsciiOfLength(10); + String sourceAlias = randomAlphaOfLength(10); + String sourceIndex = randomAlphaOfLength(10); + String targetIndex = randomAlphaOfLength(10); final RolloverRequest rolloverRequest = new RolloverRequest(sourceAlias, targetIndex); final IndicesAliasesClusterStateUpdateRequest updateRequest = TransportRolloverAction.prepareRolloverAliasesUpdateRequest(sourceIndex, targetIndex, rolloverRequest); @@ -122,10 +119,10 @@ public class TransportRolloverActionTests extends ESTestCase { } public void testValidation() throws Exception { - String index1 = 
randomAsciiOfLength(10); - String alias = randomAsciiOfLength(10); - String index2 = randomAsciiOfLength(10); - String aliasWithMultipleIndices = randomAsciiOfLength(10); + String index1 = randomAlphaOfLength(10); + String alias = randomAlphaOfLength(10); + String index2 = randomAlphaOfLength(10); + String aliasWithMultipleIndices = randomAlphaOfLength(10); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) @@ -145,24 +142,24 @@ public class TransportRolloverActionTests extends ESTestCase { expectThrows(IllegalArgumentException.class, () -> TransportRolloverAction.validate(metaData, new RolloverRequest(aliasWithMultipleIndices, - randomAsciiOfLength(10)))); + randomAlphaOfLength(10)))); expectThrows(IllegalArgumentException.class, () -> TransportRolloverAction.validate(metaData, new RolloverRequest(randomFrom(index1, index2), - randomAsciiOfLength(10)))); + randomAlphaOfLength(10)))); expectThrows(IllegalArgumentException.class, () -> - TransportRolloverAction.validate(metaData, new RolloverRequest(randomAsciiOfLength(5), - randomAsciiOfLength(10))) + TransportRolloverAction.validate(metaData, new RolloverRequest(randomAlphaOfLength(5), + randomAlphaOfLength(10))) ); - TransportRolloverAction.validate(metaData, new RolloverRequest(alias, randomAsciiOfLength(10))); + TransportRolloverAction.validate(metaData, new RolloverRequest(alias, randomAlphaOfLength(10))); } public void testGenerateRolloverIndexName() throws Exception { - String invalidIndexName = randomAsciiOfLength(10) + "A"; + String invalidIndexName = randomAlphaOfLength(10) + "A"; IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); expectThrows(IllegalArgumentException.class, () -> TransportRolloverAction.generateRolloverIndexName(invalidIndexName, indexNameExpressionResolver)); int num = randomIntBetween(0, 100); - final String indexPrefix = randomAsciiOfLength(10); + final String indexPrefix = randomAlphaOfLength(10); String indexEndingInNumbers = indexPrefix + "-" + num; assertThat(TransportRolloverAction.generateRolloverIndexName(indexEndingInNumbers, indexNameExpressionResolver), equalTo(indexPrefix + "-" + String.format(Locale.ROOT, "%06d", num + 1))); @@ -175,9 +172,9 @@ public class TransportRolloverActionTests extends ESTestCase { } public void testCreateIndexRequest() throws Exception { - String alias = randomAsciiOfLength(10); - String rolloverIndex = randomAsciiOfLength(10); - final RolloverRequest rolloverRequest = new RolloverRequest(alias, randomAsciiOfLength(10)); + String alias = randomAlphaOfLength(10); + String rolloverIndex = randomAlphaOfLength(10); + final RolloverRequest rolloverRequest = new RolloverRequest(alias, randomAlphaOfLength(10)); final ActiveShardCount activeShardCount = randomBoolean() ? 
ActiveShardCount.ALL : ActiveShardCount.ONE; rolloverRequest.setWaitForActiveShards(activeShardCount); final Settings settings = Settings.builder() diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java index 5446d74911d..b24c8dca79a 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java @@ -110,7 +110,7 @@ public class TransportShrinkActionTests extends ESTestCase { } public void testShrinkIndexSettings() { - String indexName = randomAsciiOfLength(10); + String indexName = randomAlphaOfLength(10); // create one that won't fail ClusterState clusterState = ClusterState.builder(createClusterState(indexName, randomIntBetween(2, 10), 0, Settings.builder() diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java index b419be0465b..0f24a520b84 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponseTests.java @@ -32,7 +32,7 @@ public class IndicesStatsResponseTests extends ESTestCase { public void testInvalidLevel() { final IndicesStatsResponse response = new IndicesStatsResponse(); - final String level = randomAsciiOfLength(16); + final String level = randomAlphaOfLength(16); final ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap("level", level)); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> response.toXContent(null, params)); assertThat( diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index e92f6e40ab3..f2d51539c6d 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -93,9 +93,9 @@ public class BulkItemResponseTests extends ESTestCase { final XContentType xContentType = randomFrom(XContentType.values()); int itemId = randomIntBetween(0, 100); - String index = randomAsciiOfLength(5); - String type = randomAsciiOfLength(5); - String id = randomAsciiOfLength(5); + String index = randomAlphaOfLength(5); + String type = randomAlphaOfLength(5); + String id = randomAlphaOfLength(5); DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); final Tuple<Throwable, ElasticsearchException> exceptions = randomExceptions(); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java index 91bdc83bba7..08622b327eb 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java @@ -71,9 +71,9 @@ public class BulkResponseTests extends ESTestCase { bulkItems[i] = new BulkItemResponse(i, opType, randomDocWriteResponses.v1()); expectedBulkItems[i] = new BulkItemResponse(i, opType, randomDocWriteResponses.v2()); } else { - String index = randomAsciiOfLength(5); - String type = randomAsciiOfLength(5); - String id = randomAsciiOfLength(5); + 
String index = randomAlphaOfLength(5); + String type = randomAlphaOfLength(5); + String id = randomAlphaOfLength(5); Tuple<Throwable, ElasticsearchException> failures = randomExceptions(); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index ea365974995..a2cd7e9820a 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -59,7 +59,7 @@ public class RetryTests extends ESTestCase { // Stash some random headers so we can assert that we preserve them bulkClient.threadPool().getThreadContext().stashContext(); expectedHeaders.clear(); - expectedHeaders.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + expectedHeaders.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); bulkClient.threadPool().getThreadContext().putHeader(expectedHeaders); } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index c30c2ac2f13..a11dea344ed 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -53,6 +53,8 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.action.bulk.TransportShardBulkAction; +import org.elasticsearch.action.bulk.MappingUpdatePerformer; +import org.elasticsearch.action.bulk.BulkItemResultHolder; import java.io.IOException; import static org.hamcrest.CoreMatchers.equalTo; @@ -77,26 +79,30 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { public void testShouldExecuteReplicaItem() throws Exception { // Successful index request should be replicated - DocWriteRequest writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); DocWriteResponse response = new IndexResponse(shardId, "type", "id", 1, 1, randomBoolean()); BulkItemRequest request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, response)); assertTrue(TransportShardBulkAction.shouldExecuteReplicaItem(request, 0)); // Failed index requests should not be replicated (for now!) 
- writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); + writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); response = new IndexResponse(shardId, "type", "id", 1, 1, randomBoolean()); request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse( new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, - new BulkItemResponse.Failure("test", "type", "id", new IllegalArgumentException("i died")))); + new BulkItemResponse.Failure("test", "type", "id", + new IllegalArgumentException("i died")))); assertFalse(TransportShardBulkAction.shouldExecuteReplicaItem(request, 0)); // NOOP requests should not be replicated writeRequest = new UpdateRequest("index", "type", "id"); response = new UpdateResponse(shardId, "type", "id", 1, DocWriteResponse.Result.NOOP); request = new BulkItemRequest(0, writeRequest); - request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.UPDATE, response)); + request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.UPDATE, + response)); assertFalse(TransportShardBulkAction.shouldExecuteReplicaItem(request, 0)); } @@ -112,13 +118,15 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { .create(create); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); Translog.Location location = new Translog.Location(0, 0, 0); UpdateHelper updateHelper = null; - Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, - location, 0, updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); + Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, + shard, bulkShardRequest, location, 0, updateHelper, + threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); // Translog should change, since there were no problems assertThat(newLocation, not(location)); @@ -127,7 +135,8 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { assertThat(primaryResponse.getItemId(), equalTo(0)); assertThat(primaryResponse.getId(), equalTo("id")); - assertThat(primaryResponse.getOpType(), equalTo(create ? DocWriteRequest.OpType.CREATE : DocWriteRequest.OpType.INDEX)); + assertThat(primaryResponse.getOpType(), + equalTo(create ? 
DocWriteRequest.OpType.CREATE : DocWriteRequest.OpType.INDEX)); assertFalse(primaryResponse.isFailed()); // Assert that the document actually made it there @@ -140,8 +149,10 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { items[0] = primaryRequest; bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location secondLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, - newLocation, 0, updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); + Translog.Location secondLocation = + TransportShardBulkAction.executeBulkItemRequest( metaData, + shard, bulkShardRequest, newLocation, 0, updateHelper, + threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); // Translog should not change, since the document was not indexed due to a version conflict assertThat(secondLocation, equalTo(newLocation)); @@ -177,9 +188,11 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); Translog.Location location = new Translog.Location(0, 0, 0); UpdateHelper updateHelper = null; @@ -188,8 +201,9 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { Exception err = new ReplicationOperation.RetryOnPrimaryException(shardId, "rejection"); try { - TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, location, - 0, updateHelper, threadPool::absoluteTimeInMillis, new ThrowingMappingUpdatePerformer(err)); + TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, + location, 0, updateHelper, threadPool::absoluteTimeInMillis, + new ThrowingMappingUpdatePerformer(err)); fail("should have thrown a retry exception"); } catch (ReplicationOperation.RetryOnPrimaryException e) { assertThat(e, equalTo(err)); @@ -203,9 +217,11 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); Translog.Location location = new Translog.Location(0, 0, 0); UpdateHelper updateHelper = null; @@ -213,8 +229,9 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { // Return a mapping conflict (IAE) when trying to update the mapping Exception err = new IllegalArgumentException("mapping conflict"); - Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, - location, 0, 
updateHelper, threadPool::absoluteTimeInMillis, new FailingMappingUpdatePerformer(err)); + Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, + shard, bulkShardRequest, location, 0, updateHelper, + threadPool::absoluteTimeInMillis, new ThrowingMappingUpdatePerformer(err)); // Translog shouldn't change, as there were conflicting mappings assertThat(newLocation, equalTo(location)); @@ -245,13 +262,15 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { BulkItemRequest[] items = new BulkItemRequest[1]; DocWriteRequest writeRequest = new DeleteRequest("index", "type", "id"); items[0] = new BulkItemRequest(0, writeRequest); - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); Translog.Location location = new Translog.Location(0, 0, 0); UpdateHelper updateHelper = null; - Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, - location, 0, updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); + Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, + shard, bulkShardRequest, location, 0, updateHelper, + threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); // Translog changes, even though the document didn't exist assertThat(newLocation, not(location)); @@ -288,8 +307,9 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { location = newLocation; - newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, - location, 0, updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); + newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, shard, + bulkShardRequest, location, 0, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer()); // Translog changes, because the document was deleted assertThat(newLocation, not(location)); @@ -322,19 +342,25 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { } public void testNoopUpdateReplicaRequest() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); - DocWriteResponse noopUpdateResponse = new UpdateResponse(shardId, "index", "id", 0, DocWriteResponse.Result.NOOP); - BulkItemResultHolder noopResults = new BulkItemResultHolder(noopUpdateResponse, null, replicaRequest); + DocWriteResponse noopUpdateResponse = new UpdateResponse(shardId, "index", "id", 0, + DocWriteResponse.Result.NOOP); + BulkItemResultHolder noopResults = new BulkItemResultHolder(noopUpdateResponse, null, + replicaRequest); Translog.Location location = new Translog.Location(0, 0, 0); BulkItemRequest[] items = new BulkItemRequest[0]; - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location newLocation = TransportShardBulkAction.updateReplicaRequest(noopResults, - DocWriteRequest.OpType.UPDATE, location, bulkShardRequest); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - BulkItemResponse primaryResponse = 
replicaRequest.getPrimaryResponse(); + BulkItemResponse primaryResponse = TransportShardBulkAction.createPrimaryResponse( + noopResults, DocWriteRequest.OpType.UPDATE, bulkShardRequest); + + Translog.Location newLocation = + TransportShardBulkAction.calculateTranslogLocation(location, noopResults); // Basically nothing changes in the request since it's a noop assertThat(newLocation, equalTo(location)); @@ -342,24 +368,30 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { assertThat(primaryResponse.getId(), equalTo("id")); assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertThat(primaryResponse.getResponse(), equalTo(noopUpdateResponse)); - assertThat(primaryResponse.getResponse().getResult(), equalTo(DocWriteResponse.Result.NOOP)); + assertThat(primaryResponse.getResponse().getResult(), + equalTo(DocWriteResponse.Result.NOOP)); } public void testUpdateReplicaRequestWithFailure() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); Exception err = new ElasticsearchException("I'm dead <(x.x)>"); Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0); - BulkItemResultHolder failedResults = new BulkItemResultHolder(null, indexResult, replicaRequest); + BulkItemResultHolder failedResults = new BulkItemResultHolder(null, indexResult, + replicaRequest); Translog.Location location = new Translog.Location(0, 0, 0); BulkItemRequest[] items = new BulkItemRequest[0]; - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location newLocation = TransportShardBulkAction.updateReplicaRequest(failedResults, - DocWriteRequest.OpType.UPDATE, location, bulkShardRequest); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkItemResponse primaryResponse = + TransportShardBulkAction.createPrimaryResponse( + failedResults, DocWriteRequest.OpType.UPDATE, bulkShardRequest); - BulkItemResponse primaryResponse = replicaRequest.getPrimaryResponse(); + Translog.Location newLocation = + TransportShardBulkAction.calculateTranslogLocation(location, failedResults); // Since this was not a conflict failure, the primary response // should be filled out with the failure information @@ -378,20 +410,26 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { } public void testUpdateReplicaRequestWithConflictFailure() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); - Exception err = new VersionConflictEngineException(shardId, "type", "id", "I'm conflicted <(;_;)>"); + Exception err = new VersionConflictEngineException(shardId, "type", "id", + "I'm conflicted <(;_;)>"); Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0); - BulkItemResultHolder failedResults = new BulkItemResultHolder(null, indexResult, replicaRequest); + BulkItemResultHolder failedResults = new BulkItemResultHolder(null, indexResult, + replicaRequest); Translog.Location 
location = new Translog.Location(0, 0, 0); BulkItemRequest[] items = new BulkItemRequest[0]; - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location newLocation = TransportShardBulkAction.updateReplicaRequest(failedResults, - DocWriteRequest.OpType.UPDATE, location, bulkShardRequest); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkItemResponse primaryResponse = + TransportShardBulkAction.createPrimaryResponse( + failedResults, DocWriteRequest.OpType.UPDATE, bulkShardRequest); - BulkItemResponse primaryResponse = replicaRequest.getPrimaryResponse(); + Translog.Location newLocation = + TransportShardBulkAction.calculateTranslogLocation(location, failedResults); // Even though this was a conflict failure, the primary response // should still be filled out with the failure information @@ -410,22 +448,27 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { } public void testUpdateReplicaRequestWithSuccess() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); boolean created = randomBoolean(); Translog.Location resultLocation = new Translog.Location(42, 42, 42); Engine.IndexResult indexResult = new FakeResult(1, 1, created, resultLocation); DocWriteResponse indexResponse = new IndexResponse(shardId, "index", "id", 1, 1, created); - BulkItemResultHolder goodResults = new BulkItemResultHolder(indexResponse, indexResult, replicaRequest); + BulkItemResultHolder goodResults = + new BulkItemResultHolder(indexResponse, indexResult, replicaRequest); Translog.Location originalLocation = new Translog.Location(21, 21, 21); BulkItemRequest[] items = new BulkItemRequest[0]; - BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location newLocation = TransportShardBulkAction.updateReplicaRequest(goodResults, - DocWriteRequest.OpType.INDEX, originalLocation, bulkShardRequest); + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkItemResponse primaryResponse = + TransportShardBulkAction.createPrimaryResponse( + goodResults, DocWriteRequest.OpType.INDEX, bulkShardRequest); - BulkItemResponse primaryResponse = replicaRequest.getPrimaryResponse(); + Translog.Location newLocation = + TransportShardBulkAction.calculateTranslogLocation(originalLocation, goodResults); // Check that the translog is successfully advanced assertThat(newLocation, equalTo(resultLocation)); @@ -438,6 +481,61 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { assertThat(response.status(), equalTo(created ? 
RestStatus.CREATED : RestStatus.OK)); } + public void testCalculateTranslogLocation() throws Exception { + final Translog.Location original = new Translog.Location(0, 0, 0); + + DocWriteRequest writeRequest = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); + BulkItemResultHolder results = new BulkItemResultHolder(null, null, replicaRequest); + + assertThat(TransportShardBulkAction.calculateTranslogLocation(original, results), + equalTo(original)); + + boolean created = randomBoolean(); + DocWriteResponse indexResponse = new IndexResponse(shardId, "index", "id", 1, 1, created); + Translog.Location newLocation = new Translog.Location(1, 1, 1); + Engine.IndexResult indexResult = new IndexResultWithLocation(randomNonNegativeLong(), + randomNonNegativeLong(), created, newLocation); + results = new BulkItemResultHolder(indexResponse, indexResult, replicaRequest); + assertThat(TransportShardBulkAction.calculateTranslogLocation(original, results), + equalTo(newLocation)); + + } + + public class IndexResultWithLocation extends Engine.IndexResult { + private final Translog.Location location; + public IndexResultWithLocation(long version, long seqNo, boolean created, + Translog.Location newLocation) { + super(version, seqNo, created); + this.location = newLocation; + } + + @Override + public Translog.Location getTranslogLocation() { + return this.location; + } + } + + public void testPrepareIndexOpOnReplica() throws Exception { + IndexMetaData metaData = indexMetaData(); + IndexShard shard = newStartedShard(false); + + DocWriteResponse primaryResponse = new IndexResponse(shardId, "index", "id", + 1, 1, randomBoolean()); + IndexRequest request = new IndexRequest("index", "type", "id") + .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); + + Engine.Index op = TransportShardBulkAction.prepareIndexOperationOnReplica( + primaryResponse, request, shard); + + assertThat(op.version(), equalTo(primaryResponse.getVersion())); + assertThat(op.seqNo(), equalTo(primaryResponse.getSeqNo())); + assertThat(op.versionType(), equalTo(VersionType.EXTERNAL)); + + closeShards(shard); + } + /** * Fake IndexResult that has a settable translog location */ @@ -445,7 +543,8 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { private final Translog.Location location; - protected FakeResult(long version, long seqNo, boolean created, Translog.Location location) { + protected FakeResult(long version, long seqNo, boolean created, + Translog.Location location) { super(version, seqNo, created); this.location = location; } @@ -458,23 +557,12 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { /** Doesn't perform any mapping updates */ public static class NoopMappingUpdatePerformer implements MappingUpdatePerformer { - public MappingUpdatePerformer.MappingUpdateResult updateMappingsIfNeeded(IndexShard primary, - IndexRequest request) throws Exception { - Engine.Index operation = TransportShardBulkAction.prepareIndexOperationOnPrimary(request, primary); - return new MappingUpdatePerformer.MappingUpdateResult(operation); - } - } - - /** Always returns the given failure */ - private class FailingMappingUpdatePerformer implements MappingUpdatePerformer { - private final Exception e; - FailingMappingUpdatePerformer(Exception e) { - this.e = e; + public void updateMappingsIfNeeded(Engine.Index operation, + ShardId shardId, + String type) throws Exception { } - public 
MappingUpdatePerformer.MappingUpdateResult updateMappingsIfNeeded(IndexShard primary, - IndexRequest request) throws Exception { - return new MappingUpdatePerformer.MappingUpdateResult(e); + public void verifyMappings(Engine.Index operation, ShardId shardId) throws Exception { } } @@ -485,8 +573,30 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { this.e = e; } - public MappingUpdatePerformer.MappingUpdateResult updateMappingsIfNeeded(IndexShard primary, - IndexRequest request) throws Exception { + public void updateMappingsIfNeeded(Engine.Index operation, + ShardId shardId, + String type) throws Exception { + throw e; + } + + public void verifyMappings(Engine.Index operation, ShardId shardId) throws Exception { + fail("should not have gotten to this point"); + } + } + + /** Always throw the given exception */ + private class ThrowingVerifyingMappingUpdatePerformer implements MappingUpdatePerformer { + private final Exception e; + ThrowingVerifyingMappingUpdatePerformer(Exception e) { + this.e = e; + } + + public void updateMappingsIfNeeded(Engine.Index operation, + ShardId shardId, + String type) throws Exception { + } + + public void verifyMappings(Engine.Index operation, ShardId shardId) throws Exception { throw e; } } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollParallelizationHelperTests.java b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollParallelizationHelperTests.java index e9291aa381d..5a12d54de04 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollParallelizationHelperTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollParallelizationHelperTests.java @@ -44,7 +44,7 @@ public class BulkByScrollParallelizationHelperTests extends ESTestCase { searchRequest.source().slice(null); } int times = between(2, 100); - String field = randomBoolean() ? UidFieldMapper.NAME : randomAsciiOfLength(5); + String field = randomBoolean() ? UidFieldMapper.NAME : randomAlphaOfLength(5); int currentSliceId = 0; for (SearchRequest slice : sliceIntoSubRequests(searchRequest, field, times)) { assertEquals(field, slice.source().slice().getField()); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollResponseTests.java b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollResponseTests.java index bb5d8bf26f6..1d2146c1515 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollResponseTests.java @@ -64,9 +64,9 @@ public class BulkByScrollResponseTests extends ESTestCase { Integer shardId = null; String nodeId = null; if (randomBoolean()) { - index = randomAsciiOfLength(5); + index = randomAlphaOfLength(5); shardId = randomInt(); - nodeId = usually() ? randomAsciiOfLength(5) : null; + nodeId = usually() ? 
randomAlphaOfLength(5) : null; } return singletonList(new SearchFailure(new ElasticsearchException("foo"), index, shardId, nodeId)); } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskStatusTests.java b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskStatusTests.java index b621264e7ea..503fe1db7cd 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskStatusTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskStatusTests.java @@ -105,7 +105,7 @@ public class BulkByScrollTaskStatusTests extends ESTestCase { return null; } if (randomBoolean()) { - return new BulkByScrollTask.StatusOrException(new ElasticsearchException(randomAsciiOfLength(5))); + return new BulkByScrollTask.StatusOrException(new ElasticsearchException(randomAlphaOfLength(5))); } return new BulkByScrollTask.StatusOrException(randomWorkingStatus(i)); }) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskTests.java b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskTests.java index 0d353e426ec..ff0eae55520 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskTests.java @@ -19,13 +19,11 @@ package org.elasticsearch.action.bulk.byscroll; -import org.elasticsearch.action.bulk.byscroll.BulkByScrollTask; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.Arrays; @@ -139,7 +137,7 @@ public class BulkByScrollTaskTests extends ESTestCase { mergedRequestsPerSecond += requestsPerSecond; mergedThrottledUntil = timeValueNanos(min(mergedThrottledUntil.nanos(), throttledUntil.nanos())); } - String reasonCancelled = randomBoolean() ? randomAsciiOfLength(10) : null; + String reasonCancelled = randomBoolean() ? 
randomAlphaOfLength(10) : null; BulkByScrollTask.Status merged = new BulkByScrollTask.Status(Arrays.asList(statuses), reasonCancelled); assertEquals(mergedTotal, merged.getTotal()); assertEquals(mergedUpdated, merged.getUpdated()); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/DeleteByQueryRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/DeleteByQueryRequestTests.java index 373ee7ab53c..f5c00f63de9 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/DeleteByQueryRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/byscroll/DeleteByQueryRequestTests.java @@ -19,10 +19,16 @@ package org.elasticsearch.action.bulk.byscroll; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.index.query.QueryBuilders; import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; public class DeleteByQueryRequestTests extends AbstractBulkByScrollRequestTestCase<DeleteByQueryRequest> { public void testDeleteteByQueryRequestImplementsIndicesRequestReplaceable() { @@ -60,7 +66,7 @@ public class DeleteByQueryRequestTests extends AbstractBulkByScrollRequestTestCa @Override protected DeleteByQueryRequest newRequest() { - return new DeleteByQueryRequest(new SearchRequest(randomAsciiOfLength(5))); + return new DeleteByQueryRequest(new SearchRequest(randomAlphaOfLength(5))); } @Override @@ -96,4 +102,26 @@ public class DeleteByQueryRequestTests extends AbstractBulkByScrollRequestTestCa request.types(types); assertArrayEquals(request.types(), types); } + + public void testValidateGivenNoQuery() { + SearchRequest searchRequest = new SearchRequest(); + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(searchRequest); + deleteByQueryRequest.indices("*"); + + ActionRequestValidationException e = deleteByQueryRequest.validate(); + + assertThat(e, is(not(nullValue()))); + assertThat(e.getMessage(), containsString("query is missing")); + } + + public void testValidateGivenValid() { + SearchRequest searchRequest = new SearchRequest(); + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(searchRequest); + deleteByQueryRequest.indices("*"); + searchRequest.source().query(QueryBuilders.matchAllQuery()); + + ActionRequestValidationException e = deleteByQueryRequest.validate(); + + assertThat(e, is(nullValue())); + } } diff --git a/core/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java b/core/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java index 9ff1aabb844..cbf514375c2 100644 --- a/core/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java @@ -91,11 +91,11 @@ public class DeleteResponseTests extends ESTestCase { * expected {@link DeleteResponse} after parsing. 
*/ public static Tuple<DeleteResponse, DeleteResponse> randomDeleteResponse() { - String index = randomAsciiOfLength(5); - String indexUUid = randomAsciiOfLength(5); + String index = randomAlphaOfLength(5); + String indexUUid = randomAlphaOfLength(5); int shardId = randomIntBetween(0, 5); - String type = randomAsciiOfLength(5); - String id = randomAsciiOfLength(5); + String type = randomAlphaOfLength(5); + String id = randomAlphaOfLength(5); long seqNo = randomFrom(SequenceNumbersService.UNASSIGNED_SEQ_NO, randomNonNegativeLong(), (long) randomIntBetween(0, 10000)); long version = randomBoolean() ? randomNonNegativeLong() : randomIntBetween(0, 10000); boolean found = randomBoolean(); diff --git a/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java new file mode 100644 index 00000000000..6b68112d5d5 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class FieldCapabilitiesRequestTests extends ESTestCase { + private FieldCapabilitiesRequest randomRequest() { + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); + int size = randomIntBetween(1, 20); + String[] randomFields = new String[size]; + for (int i = 0; i < size; i++) { + randomFields[i] = randomAlphaOfLengthBetween(5, 10); + } + request.fields(randomFields); + return request; + } + + public void testFieldCapsRequestSerialization() throws IOException { + for (int i = 0; i < 20; i++) { + FieldCapabilitiesRequest request = randomRequest(); + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + output.flush(); + StreamInput input = output.bytes().streamInput(); + FieldCapabilitiesRequest deserialized = new FieldCapabilitiesRequest(); + deserialized.readFrom(input); + assertEquals(deserialized, request); + assertEquals(deserialized.hashCode(), request.hashCode()); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java new file mode 100644 index 00000000000..2eaf1d4832f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class FieldCapabilitiesResponseTests extends ESTestCase { + private FieldCapabilitiesResponse randomResponse() { + Map<String, Map<String, FieldCapabilities>> fieldMap = new HashMap<>(); + int numFields = randomInt(10); + for (int i = 0; i < numFields; i++) { + String fieldName = randomAlphaOfLengthBetween(5, 10); + int numIndices = randomIntBetween(1, 5); + Map<String, FieldCapabilities> indexFieldMap = new HashMap<>(); + for (int j = 0; j < numIndices; j++) { + String index = randomAlphaOfLengthBetween(10, 20); + indexFieldMap.put(index, FieldCapabilitiesTests.randomFieldCaps()); + } + fieldMap.put(fieldName, indexFieldMap); + } + return new FieldCapabilitiesResponse(fieldMap); + } + + public void testSerialization() throws IOException { + for (int i = 0; i < 20; i++) { + FieldCapabilitiesResponse response = randomResponse(); + BytesStreamOutput output = new BytesStreamOutput(); + response.writeTo(output); + output.flush(); + StreamInput input = output.bytes().streamInput(); + FieldCapabilitiesResponse deserialized = new FieldCapabilitiesResponse(); + deserialized.readFrom(input); + assertEquals(deserialized, response); + assertEquals(deserialized.hashCode(), response.hashCode()); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java new file mode 100644 index 00000000000..b485d51bd6a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class FieldCapabilitiesTests extends AbstractWireSerializingTestCase<FieldCapabilities> { + @Override + protected FieldCapabilities createTestInstance() { + return randomFieldCaps(); + } + + @Override + protected Writeable.Reader<FieldCapabilities> instanceReader() { + return FieldCapabilities::new; + } + + public void testBuilder() { + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + builder.add("index1", true, false); + builder.add("index2", true, false); + builder.add("index3", true, false); + + { + FieldCapabilities cap1 = builder.build(false); + assertThat(cap1.isSearchable(), equalTo(true)); + assertThat(cap1.isAggregatable(), equalTo(false)); + assertNull(cap1.indices()); + assertNull(cap1.nonSearchableIndices()); + assertNull(cap1.nonAggregatableIndices()); + + FieldCapabilities cap2 = builder.build(true); + assertThat(cap2.isSearchable(), equalTo(true)); + assertThat(cap2.isAggregatable(), equalTo(false)); + assertThat(cap2.indices().length, equalTo(3)); + assertThat(cap2.indices(), equalTo(new String[]{"index1", "index2", "index3"})); + assertNull(cap2.nonSearchableIndices()); + assertNull(cap2.nonAggregatableIndices()); + } + + builder = new FieldCapabilities.Builder("field", "type"); + builder.add("index1", false, true); + builder.add("index2", true, false); + builder.add("index3", false, false); + { + FieldCapabilities cap1 = builder.build(false); + assertThat(cap1.isSearchable(), equalTo(false)); + assertThat(cap1.isAggregatable(), equalTo(false)); + assertNull(cap1.indices()); + assertThat(cap1.nonSearchableIndices(), equalTo(new String[]{"index1", "index3"})); + assertThat(cap1.nonAggregatableIndices(), equalTo(new String[]{"index2", "index3"})); + + FieldCapabilities cap2 = builder.build(true); + assertThat(cap2.isSearchable(), equalTo(false)); + assertThat(cap2.isAggregatable(), equalTo(false)); + assertThat(cap2.indices().length, equalTo(3)); + assertThat(cap2.indices(), equalTo(new String[]{"index1", "index2", "index3"})); + assertThat(cap2.nonSearchableIndices(), equalTo(new String[]{"index1", "index3"})); + assertThat(cap2.nonAggregatableIndices(), equalTo(new String[]{"index2", "index3"})); + } + } + + static FieldCapabilities randomFieldCaps() { + String[] indices = null; + if (randomBoolean()) { + indices = new String[randomIntBetween(1, 5)]; + for (int i = 0; i < indices.length; i++) { + indices[i] = randomAlphaOfLengthBetween(5, 20); + } + } + String[] nonSearchableIndices = null; + if (randomBoolean()) { + nonSearchableIndices = new String[randomIntBetween(0, 5)]; + for (int i = 0; i < nonSearchableIndices.length; i++) { + nonSearchableIndices[i] = randomAlphaOfLengthBetween(5, 20); + } + } + String[] nonAggregatableIndices = null; + if (randomBoolean()) { + nonAggregatableIndices = new String[randomIntBetween(0, 5)]; + for (int i = 0; i < nonAggregatableIndices.length; i++) { + nonAggregatableIndices[i] = randomAlphaOfLengthBetween(5, 20); + } + } + return new FieldCapabilities(randomAlphaOfLengthBetween(5, 20), + randomAlphaOfLengthBetween(5, 20), randomBoolean(), randomBoolean(), + indices, nonSearchableIndices, nonAggregatableIndices); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java b/core/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java index 6c79b20774c..73c77d06292 
100644 --- a/core/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java @@ -19,13 +19,78 @@ package org.elasticsearch.action.get; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + public class MultiGetRequestTests extends ESTestCase { + public void testAddWithInvalidKey() throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + { + builder.startArray("doc"); + { + builder.startObject(); + { + builder.field("_type", "type"); + builder.field("_id", "1"); + } + builder.endObject(); + } + builder.endArray(); + } + builder.endObject(); + final XContentParser parser = createParser(builder); + final MultiGetRequest mgr = new MultiGetRequest(); + final ParsingException e = expectThrows( + ParsingException.class, + () -> { + final String defaultIndex = randomAlphaOfLength(5); + final String defaultType = randomAlphaOfLength(3); + final FetchSourceContext fetchSource = FetchSourceContext.FETCH_SOURCE; + mgr.add(defaultIndex, defaultType, null, fetchSource, null, parser, true); + }); + assertThat( + e.toString(), + containsString("unknown key [doc] for a START_ARRAY, expected [docs] or [ids]")); + } + + public void testUnexpectedField() throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + { + builder.startObject("docs"); + { + builder.field("_type", "type"); + builder.field("_id", "1"); + } + builder.endObject(); + } + builder.endObject(); + final XContentParser parser = createParser(builder); + final MultiGetRequest mgr = new MultiGetRequest(); + final ParsingException e = expectThrows( + ParsingException.class, + () -> { + final String defaultIndex = randomAlphaOfLength(5); + final String defaultType = randomAlphaOfLength(3); + final FetchSourceContext fetchSource = FetchSourceContext.FETCH_SOURCE; + mgr.add(defaultIndex, defaultType, null, fetchSource, null, parser, true); + }); + assertThat( + e.toString(), + containsString( + "unexpected token [START_OBJECT], expected [FIELD_NAME] or [START_ARRAY]")); + } + public void testAddWithInvalidSourceValueIsRejected() throws Exception { String sourceValue = randomFrom("on", "off", "0", "1"); XContentParser parser = createParser(XContentFactory.jsonBuilder() @@ -40,7 +105,7 @@ public class MultiGetRequestTests extends ESTestCase { MultiGetRequest multiGetRequest = new MultiGetRequest(); IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> multiGetRequest.add - (randomAsciiOfLength(5), randomAsciiOfLength(3), null, FetchSourceContext.FETCH_SOURCE, null, parser, true)); + (randomAlphaOfLength(5), randomAlphaOfLength(3), null, FetchSourceContext.FETCH_SOURCE, null, parser, true)); assertEquals("Failed to parse value [" + sourceValue + "] as only [true] or [false] are allowed.", ex.getMessage()); } @@ -60,7 +125,7 @@ public class MultiGetRequestTests extends ESTestCase { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add( - randomAsciiOfLength(5), randomAsciiOfLength(3), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); + 
randomAlphaOfLength(5), randomAlphaOfLength(3), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); assertEquals(2, multiGetRequest.getItems().size()); } diff --git a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java index 1d3d2c1b553..a130f10d659 100644 --- a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java @@ -34,7 +34,7 @@ public class MultiGetShardRequestTests extends ESTestCase { public void testSerialization() throws IOException { MultiGetRequest multiGetRequest = new MultiGetRequest(); if (randomBoolean()) { - multiGetRequest.preference(randomAsciiOfLength(randomIntBetween(1, 10))); + multiGetRequest.preference(randomAlphaOfLength(randomIntBetween(1, 10))); } if (randomBoolean()) { multiGetRequest.realtime(false); @@ -45,12 +45,12 @@ public class MultiGetShardRequestTests extends ESTestCase { MultiGetShardRequest multiGetShardRequest = new MultiGetShardRequest(multiGetRequest, "index", 0); int numItems = iterations(10, 30); for (int i = 0; i < numItems; i++) { - MultiGetRequest.Item item = new MultiGetRequest.Item("alias-" + randomAsciiOfLength(randomIntBetween(1, 10)), "type", "id-" + i); + MultiGetRequest.Item item = new MultiGetRequest.Item("alias-" + randomAlphaOfLength(randomIntBetween(1, 10)), "type", "id-" + i); if (randomBoolean()) { int numFields = randomIntBetween(1, 5); String[] fields = new String[numFields]; for (int j = 0; j < fields.length; j++) { - fields[j] = randomAsciiOfLength(randomIntBetween(1, 10)); + fields[j] = randomAlphaOfLength(randomIntBetween(1, 10)); } item.storedFields(fields); } diff --git a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java index 5b9649a7559..b4836496f88 100644 --- a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java @@ -90,19 +90,19 @@ public class IndexRequestTests extends ESTestCase { } public void testIndexingRejectsLongIds() { - String id = randomAsciiOfLength(511); + String id = randomAlphaOfLength(511); IndexRequest request = new IndexRequest("index", "type", id); request.source("{}", XContentType.JSON); ActionRequestValidationException validate = request.validate(); assertNull(validate); - id = randomAsciiOfLength(512); + id = randomAlphaOfLength(512); request = new IndexRequest("index", "type", id); request.source("{}", XContentType.JSON); validate = request.validate(); assertNull(validate); - id = randomAsciiOfLength(513); + id = randomAlphaOfLength(513); request = new IndexRequest("index", "type", id); request.source("{}", XContentType.JSON); validate = request.validate(); @@ -130,9 +130,9 @@ public class IndexRequestTests extends ESTestCase { } public void testIndexResponse() { - ShardId shardId = new ShardId(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), randomIntBetween(0, 1000)); - String type = randomAsciiOfLengthBetween(3, 10); - String id = randomAsciiOfLengthBetween(3, 10); + ShardId shardId = new ShardId(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), randomIntBetween(0, 1000)); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); long version = randomLong(); boolean created = randomBoolean(); 
IndexResponse indexResponse = new IndexResponse(shardId, type, id, SequenceNumbersService.UNASSIGNED_SEQ_NO, version, created); diff --git a/core/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java index 24393e56e02..4afa0f7298c 100644 --- a/core/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java @@ -104,11 +104,11 @@ public class IndexResponseTests extends ESTestCase { * expected {@link IndexResponse} after parsing. */ public static Tuple<IndexResponse, IndexResponse> randomIndexResponse() { - String index = randomAsciiOfLength(5); - String indexUUid = randomAsciiOfLength(5); + String index = randomAlphaOfLength(5); + String indexUUid = randomAlphaOfLength(5); int shardId = randomIntBetween(0, 5); - String type = randomAsciiOfLength(5); - String id = randomAsciiOfLength(5); + String type = randomAlphaOfLength(5); + String id = randomAlphaOfLength(5); long seqNo = randomFrom(SequenceNumbersService.UNASSIGNED_SEQ_NO, randomNonNegativeLong(), (long) randomIntBetween(0, 10000)); long version = randomBoolean() ? randomNonNegativeLong() : randomIntBetween(0, 10000); boolean created = randomBoolean(); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java index ab5d30c6f9b..6f8280277e0 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -71,14 +71,14 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { requestContent.put(Fields.DOCS, docs); for (int i = 0; i < numDocs; i++) { Map<String, Object> doc = new HashMap<>(); - String index = randomAsciiOfLengthBetween(1, 10); - String type = randomAsciiOfLengthBetween(1, 10); - String id = randomAsciiOfLengthBetween(1, 10); + String index = randomAlphaOfLengthBetween(1, 10); + String type = randomAlphaOfLengthBetween(1, 10); + String id = randomAlphaOfLengthBetween(1, 10); doc.put(INDEX.getFieldName(), index); doc.put(TYPE.getFieldName(), type); doc.put(ID.getFieldName(), id); - String fieldName = randomAsciiOfLengthBetween(1, 10); - String fieldValue = randomAsciiOfLengthBetween(1, 10); + String fieldName = randomAlphaOfLengthBetween(1, 10); + String fieldValue = randomAlphaOfLengthBetween(1, 10); doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, fieldValue)); docs.add(doc); Map<String, Object> expectedDoc = new HashMap<>(); @@ -116,14 +116,14 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { requestContent.put(Fields.DOCS, docs); for (int i = 0; i < numDocs; i++) { Map<String, Object> doc = new HashMap<>(); - String index = randomAsciiOfLengthBetween(1, 10); - String type = randomAsciiOfLengthBetween(1, 10); - String id = randomAsciiOfLengthBetween(1, 10); + String index = randomAlphaOfLengthBetween(1, 10); + String type = randomAlphaOfLengthBetween(1, 10); + String id = randomAlphaOfLengthBetween(1, 10); doc.put(INDEX.getFieldName(), index); doc.put(TYPE.getFieldName(), type); doc.put(ID.getFieldName(), id); - String fieldName = randomAsciiOfLengthBetween(1, 10); - String fieldValue = randomAsciiOfLengthBetween(1, 10); + String fieldName = randomAlphaOfLengthBetween(1, 10); + String fieldValue = randomAlphaOfLengthBetween(1, 10); doc.put(Fields.SOURCE, Collections.singletonMap(fieldName, 
fieldValue)); docs.add(doc); Map<String, Object> expectedDoc = new HashMap<>(); @@ -190,7 +190,7 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { } public void testNonExistentPipelineId() { - String pipelineId = randomAsciiOfLengthBetween(1, 10); + String pipelineId = randomAlphaOfLengthBetween(1, 10); Map<String, Object> requestContent = new HashMap<>(); List<Map<String, Object>> docs = new ArrayList<>(); requestContent.put(Fields.DOCS, docs); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java index 86dc56cdd04..e3ca936bb84 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java @@ -38,7 +38,7 @@ public class SimulatePipelineRequestTests extends ESTestCase { SimulatePipelineRequest request = new SimulatePipelineRequest(new BytesArray(""), XContentType.JSON); // Sometimes we set an id if (randomBoolean()) { - request.setId(randomAsciiOfLengthBetween(1, 10)); + request.setId(randomAlphaOfLengthBetween(1, 10)); } // Sometimes we explicitly set a boolean (with whatever value) diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index ad308b01bf2..be448a09db8 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -39,7 +39,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { public void testSerialization() throws IOException { boolean isVerbose = randomBoolean(); - String id = randomBoolean() ? randomAsciiOfLengthBetween(1, 10) : null; + String id = randomBoolean() ? 
randomAlphaOfLengthBetween(1, 10) : null; int numResults = randomIntBetween(1, 10); List<SimulateDocumentResult> results = new ArrayList<>(numResults); for (int i = 0; i < numResults; i++) { @@ -49,7 +49,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { int numProcessors = randomIntBetween(1, 10); List<SimulateProcessorResult> processorResults = new ArrayList<>(numProcessors); for (int j = 0; j < numProcessors; j++) { - String processorTag = randomAsciiOfLengthBetween(1, 10); + String processorTag = randomAlphaOfLengthBetween(1, 10); SimulateProcessorResult processorResult; if (isFailure) { processorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index 75d2d5834fe..3014a1a4ae6 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.nullValue; public class SimulateProcessorResultTests extends ESTestCase { public void testSerialization() throws IOException { - String processorTag = randomAsciiOfLengthBetween(1, 10); + String processorTag = randomAlphaOfLengthBetween(1, 10); boolean isSuccessful = randomBoolean(); boolean isIgnoredException = randomBoolean(); SimulateProcessorResult simulateProcessorResult; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index d5417526c0c..c5f67e23506 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -45,12 +45,12 @@ public class WriteableIngestDocumentTests extends ESTestCase { Map<String, Object> sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); } Map<String, Object> ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { - ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + ingestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } WriteableIngestDocument ingestDocument = new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); @@ -65,7 +65,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { if (randomBoolean()) { numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); } changed = true; } @@ -75,7 +75,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { otherIngestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { - 
otherIngestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + otherIngestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } changed = true; } else { @@ -101,12 +101,12 @@ public class WriteableIngestDocumentTests extends ESTestCase { Map<String, Object> sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); } Map<String, Object> ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { - ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + ingestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } WriteableIngestDocument writeableIngestDocument = new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, ingestMetadata)); diff --git a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java index f673af9d963..3407007d647 100644 --- a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -54,7 +54,7 @@ public class MainActionTests extends ESTestCase { public void testMainResponseSerialization() throws IOException { final String nodeName = "node1"; final ClusterName clusterName = new ClusterName("cluster1"); - final String clusterUUID = randomAsciiOfLengthBetween(10, 20); + final String clusterUUID = randomAlphaOfLengthBetween(10, 20); final boolean available = randomBoolean(); final Version version = Version.CURRENT; final Build build = Build.CURRENT; @@ -73,7 +73,7 @@ } public void testMainResponseXContent() throws IOException { - String clusterUUID = randomAsciiOfLengthBetween(10, 20); + String clusterUUID = randomAlphaOfLengthBetween(10, 20); final MainResponse mainResponse = new MainResponse("node1", Version.CURRENT, new ClusterName("cluster1"), clusterUUID, Build.CURRENT, false); final String expected = "{" + diff --git a/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java b/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java index 9233ca15308..429fe5cac7e 100644 --- a/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java @@ -41,10 +41,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC public class MainResponseTests extends ESTestCase { public static MainResponse createTestItem() { - String clusterUuid = randomAsciiOfLength(10); - ClusterName clusterName = new ClusterName(randomAsciiOfLength(10)); - String nodeName = randomAsciiOfLength(10); - Build build = new Build(randomAsciiOfLength(8), new Date(randomNonNegativeLong()).toString(), randomBoolean()); + String clusterUuid = randomAlphaOfLength(10); + ClusterName clusterName = new ClusterName(randomAlphaOfLength(10)); + String nodeName = randomAlphaOfLength(10); + Build build = new Build(randomAlphaOfLength(8), new Date(randomNonNegativeLong()).toString(), randomBoolean()); Version version = 
VersionUtils.randomVersion(random()); boolean available = randomBoolean(); return new MainResponse(nodeName, version, clusterName, clusterUuid , build, available); @@ -108,10 +108,10 @@ public class MainResponseTests extends ESTestCase { ClusterName clusterName = o.getClusterName(); switch (randomIntBetween(0, 5)) { case 0: - clusterUuid = clusterUuid + randomAsciiOfLength(5); + clusterUuid = clusterUuid + randomAlphaOfLength(5); break; case 1: - nodeName = nodeName + randomAsciiOfLength(5); + nodeName = nodeName + randomAlphaOfLength(5); break; case 2: available = !available; @@ -124,7 +124,7 @@ version = randomValueOtherThan(version, () -> VersionUtils.randomVersion(random())); break; case 5: - clusterName = new ClusterName(clusterName + randomAsciiOfLength(5)); + clusterName = new ClusterName(clusterName + randomAlphaOfLength(5)); break; } return new MainResponse(nodeName, version, clusterName, clusterUuid, build, available); diff --git a/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java b/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java index cd8b0743675..beec582b13f 100644 --- a/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTookTests.java @@ -125,7 +125,7 @@ public class AbstractSearchAsyncActionTookTests extends ESTestCase { protected void executePhaseOnShard( final ShardIterator shardIt, final ShardRouting shard, - final ActionListener<SearchPhaseResult> listener) { + final SearchActionListener<SearchPhaseResult> listener) { } diff --git a/core/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java b/core/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java index 6995ad93f25..ccb75ff3ab4 100644 --- a/core/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java @@ -46,7 +46,7 @@ public class CountedCollectorTests extends ESTestCase { runnable.run(); } }; - CountedCollector<SearchPhaseResult> collector = new CountedCollector<>(results::set, numResultsExpected, + CountedCollector<SearchPhaseResult> collector = new CountedCollector<>(r -> results.set(r.getShardIndex(), r), numResultsExpected, latch::countDown, context); for (int i = 0; i < numResultsExpected; i++) { int shardID = i; @@ -57,8 +57,12 @@ break; case 1: state.add(1); - executor.execute(() -> collector.onResult(shardID, new DfsSearchResult(shardID, null), new SearchShardTarget("foo", - new Index("bar", "baz"), shardID))); + executor.execute(() -> { + DfsSearchResult dfsSearchResult = new DfsSearchResult(shardID, null); + dfsSearchResult.setShardIndex(shardID); + dfsSearchResult.setSearchShardTarget(new SearchShardTarget("foo", + new Index("bar", "baz"), shardID)); + collector.onResult(dfsSearchResult);}); break; case 2: state.add(2); @@ -79,7 +83,7 @@ break; case 1: assertNotNull(results.get(i)); - assertEquals(i, results.get(i).id()); + assertEquals(i, results.get(i).getRequestId()); break; case 2: final int shardId = i; diff --git a/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index ba01559e0f0..c2f21a7cc2c 100644 --- 
a/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -18,52 +18,42 @@ */ package org.elasticsearch.action.search; -import org.apache.logging.log4j.Logger; import org.apache.lucene.index.Term; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.MockDirectoryWrapper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.routing.ShardIterator; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.Index; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.dfs.DfsSearchResult; -import org.elasticsearch.search.fetch.FetchSearchResult; -import org.elasticsearch.search.fetch.QueryFetchSearchResult; -import org.elasticsearch.search.fetch.ShardFetchSearchRequest; -import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; import java.io.IOException; import java.io.UncheckedIOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; public class DfsQueryPhaseTests extends ESTestCase { + private static DfsSearchResult newSearchResult(int shardIndex, long requestId, SearchShardTarget target) { + DfsSearchResult result = new DfsSearchResult(requestId, target); + result.setShardIndex(shardIndex); + return result; + } + public void testDfsWith2Shards() throws IOException { AtomicArray<DfsSearchResult> results = new AtomicArray<>(2); - AtomicReference<AtomicArray<QuerySearchResultProvider>> responseRef = new AtomicReference<>(); - results.set(0, new DfsSearchResult(1, new SearchShardTarget("node1", new Index("test", "na"), 0))); - results.set(1, new DfsSearchResult(2, new SearchShardTarget("node2", new Index("test", "na"), 0))); + AtomicReference<AtomicArray<QuerySearchResult>> responseRef = new AtomicReference<>(); + results.set(0, newSearchResult(0, 1, new SearchShardTarget("node1", new Index("test", "na"), 0))); + results.set(1, newSearchResult(1, 2, new SearchShardTarget("node2", new Index("test", "na"), 0))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -73,7 +63,7 @@ public class DfsQueryPhaseTests extends ESTestCase { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, - ActionListener<QuerySearchResult> listener) { + SearchActionListener<QuerySearchResult> listener) { if (request.id() == 1) { QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new 
ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); @@ -116,9 +106,9 @@ public class DfsQueryPhaseTests extends ESTestCase { public void testDfsWith1ShardFailed() throws IOException { AtomicArray<DfsSearchResult> results = new AtomicArray<>(2); - AtomicReference<AtomicArray<QuerySearchResultProvider>> responseRef = new AtomicReference<>(); - results.set(0, new DfsSearchResult(1, new SearchShardTarget("node1", new Index("test", "na"), 0))); - results.set(1, new DfsSearchResult(2, new SearchShardTarget("node2", new Index("test", "na"), 0))); + AtomicReference<AtomicArray<QuerySearchResult>> responseRef = new AtomicReference<>(); + results.set(0, newSearchResult(0, 1, new SearchShardTarget("node1", new Index("test", "na"), 0))); + results.set(1, newSearchResult(1, 2, new SearchShardTarget("node2", new Index("test", "na"), 0))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -128,7 +118,7 @@ public class DfsQueryPhaseTests extends ESTestCase { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, - ActionListener<QuerySearchResult> listener) { + SearchActionListener<QuerySearchResult> listener) { if (request.id() == 1) { QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); @@ -171,9 +161,9 @@ public class DfsQueryPhaseTests extends ESTestCase { public void testFailPhaseOnException() throws IOException { AtomicArray<DfsSearchResult> results = new AtomicArray<>(2); - AtomicReference<AtomicArray<QuerySearchResultProvider>> responseRef = new AtomicReference<>(); - results.set(0, new DfsSearchResult(1, new SearchShardTarget("node1", new Index("test", "na"), 0))); - results.set(1, new DfsSearchResult(2, new SearchShardTarget("node2", new Index("test", "na"), 0))); + AtomicReference<AtomicArray<QuerySearchResult>> responseRef = new AtomicReference<>(); + results.set(0, newSearchResult(0, 1, new SearchShardTarget("node1", new Index("test", "na"), 0))); + results.set(1, newSearchResult(1, 2, new SearchShardTarget("node2", new Index("test", "na"), 0))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -183,7 +173,7 @@ public class DfsQueryPhaseTests extends ESTestCase { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, - ActionListener<QuerySearchResult> listener) { + SearchActionListener<QuerySearchResult> listener) { if (request.id() == 1) { QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); diff --git a/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 14c2eb6f63f..239f8f10a41 100644 --- a/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -21,20 +21,18 @@ package org.elasticsearch.action.search; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.MockDirectoryWrapper; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.AtomicArray; import 
org.elasticsearch.index.Index; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -48,7 +46,7 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testShortcutQueryAndFetchOptimization() throws IOException { SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - InitialSearchPhase.SearchPhaseResults<QuerySearchResultProvider> results = + InitialSearchPhase.SearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 1); AtomicReference<SearchResponse> responseRef = new AtomicReference<>(); boolean hasHits = randomBoolean(); @@ -59,7 +57,9 @@ public class FetchSearchPhaseTests extends ESTestCase { queryResult.size(1); FetchSearchResult fetchResult = new FetchSearchResult(); fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, 1, 1.0F)); - results.consumeResult(0, new QueryFetchSearchResult(queryResult, fetchResult)); + QueryFetchSearchResult fetchSearchResult = new QueryFetchSearchResult(queryResult, fetchResult); + fetchSearchResult.setShardIndex(0); + results.consumeResult(fetchSearchResult); numHits = 1; } else { numHits = 0; @@ -86,25 +86,27 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testFetchTwoDocument() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); - InitialSearchPhase.SearchPhaseResults<QuerySearchResultProvider> results = + InitialSearchPhase.SearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference<SearchResponse> responseRef = new AtomicReference<>(); int resultSetSize = randomIntBetween(2, 10); QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set - results.consumeResult(0, queryResult); + queryResult.setShardIndex(0); + results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); - results.consumeResult(1, queryResult); + queryResult.setShardIndex(1); + results.consumeResult(queryResult); SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, - ActionListener<FetchSearchResult> listener) { + SearchActionListener<FetchSearchResult> listener) { FetchSearchResult fetchResult = new FetchSearchResult(); 
if (request.id() == 321) { fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, 1, 2.0F)); @@ -138,25 +140,27 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testFailFetchOneDoc() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); - InitialSearchPhase.SearchPhaseResults<QuerySearchResultProvider> results = + InitialSearchPhase.SearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference<SearchResponse> responseRef = new AtomicReference<>(); int resultSetSize = randomIntBetween(2, 10); QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set - results.consumeResult(0, queryResult); + queryResult.setShardIndex(0); + results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); - results.consumeResult(1, queryResult); + queryResult.setShardIndex(1); + results.consumeResult(queryResult); SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, - ActionListener<FetchSearchResult> listener) { + SearchActionListener<FetchSearchResult> listener) { if (request.id() == 321) { FetchSearchResult fetchResult = new FetchSearchResult(); fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, 1, 2.0F)); @@ -195,20 +199,21 @@ public class FetchSearchPhaseTests extends ESTestCase { int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits); - InitialSearchPhase.SearchPhaseResults<QuerySearchResultProvider> results = + InitialSearchPhase.SearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), numHits); AtomicReference<SearchResponse> responseRef = new AtomicReference<>(); for (int i = 0; i < numHits; i++) { QuerySearchResult queryResult = new QuerySearchResult(i, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(i+1, i)}, i), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set - results.consumeResult(i, queryResult); + queryResult.setShardIndex(i); + results.consumeResult(queryResult); } SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, - ActionListener<FetchSearchResult> listener) { + SearchActionListener<FetchSearchResult> listener) { new Thread(() -> { FetchSearchResult fetchResult = new FetchSearchResult(); fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit((int) (request.id()+1))}, 1, 100F)); @@ -249,25 +254,27 @@ public class 
FetchSearchPhaseTests extends ESTestCase { public void testExceptionFailsPhase() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); - InitialSearchPhase.SearchPhaseResults<QuerySearchResultProvider> results = + InitialSearchPhase.SearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference<SearchResponse> responseRef = new AtomicReference<>(); int resultSetSize = randomIntBetween(2, 10); QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set - results.consumeResult(0, queryResult); + queryResult.setShardIndex(0); + results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); - results.consumeResult(1, queryResult); + queryResult.setShardIndex(1); + results.consumeResult(queryResult); AtomicInteger numFetches = new AtomicInteger(0); SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, - ActionListener<FetchSearchResult> listener) { + SearchActionListener<FetchSearchResult> listener) { FetchSearchResult fetchResult = new FetchSearchResult(); if (numFetches.incrementAndGet() == 1) { throw new RuntimeException("BOOM"); @@ -300,25 +307,27 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testCleanupIrrelevantContexts() throws IOException { // contexts that are not fetched should be cleaned up MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); - InitialSearchPhase.SearchPhaseResults<QuerySearchResultProvider> results = + InitialSearchPhase.SearchPhaseResults<SearchPhaseResult> results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference<SearchResponse> responseRef = new AtomicReference<>(); int resultSetSize = 1; QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set - results.consumeResult(0, queryResult); + queryResult.setShardIndex(0); + results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1)); queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); - results.consumeResult(1, queryResult); + queryResult.setShardIndex(1); + results.consumeResult(queryResult); SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, - ActionListener<FetchSearchResult> listener) { 
+ SearchActionListener listener) { FetchSearchResult fetchResult = new FetchSearchResult(); if (request.id() == 321) { fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, 1, 2.0F)); diff --git a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java index f2493adac1e..15c735cafa6 100644 --- a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterConnectionTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.CancellableThreads; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; @@ -81,23 +82,33 @@ public class RemoteClusterConnectionTests extends ESTestCase { } public static MockTransportService startTransport(String id, List knownNodes, Version version, ThreadPool threadPool) { + return startTransport(id, knownNodes, version, threadPool, Settings.EMPTY); + } + + public static MockTransportService startTransport( + final String id, + final List knownNodes, + final Version version, + final ThreadPool threadPool, + final Settings settings) { boolean success = false; - MockTransportService newService = MockTransportService.createNewService(Settings.EMPTY, version, threadPool, null); + final Settings s = Settings.builder().put(settings).put("node.name", id).build(); + MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null); try { newService.registerRequestHandler(ClusterSearchShardsAction.NAME, ClusterSearchShardsRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { - channel.sendResponse(new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0], - knownNodes.toArray(new DiscoveryNode[0]), Collections.emptyMap())); - }); + (request, channel) -> { + channel.sendResponse(new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0], + knownNodes.toArray(new DiscoveryNode[0]), Collections.emptyMap())); + }); newService.registerRequestHandler(ClusterStateAction.NAME, ClusterStateRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { - DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); - for (DiscoveryNode node : knownNodes) { - builder.add(node); - } - ClusterState build = ClusterState.builder(ClusterName.DEFAULT).nodes(builder.build()).build(); - channel.sendResponse(new ClusterStateResponse(ClusterName.DEFAULT, build, 0L)); - }); + (request, channel) -> { + DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); + for (DiscoveryNode node : knownNodes) { + builder.add(node); + } + ClusterState build = ClusterState.builder(ClusterName.DEFAULT).nodes(builder.build()).build(); + channel.sendResponse(new ClusterStateResponse(ClusterName.DEFAULT, build, 0L)); + }); newService.start(); newService.acceptIncomingRequests(); success = true; diff --git a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java index 1531d66e5da..d0f0427e710 100644 --- a/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/search/RemoteClusterServiceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -49,6 +50,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; public class RemoteClusterServiceTests extends ESTestCase { @@ -62,7 +64,16 @@ public class RemoteClusterServiceTests extends ESTestCase { } private MockTransportService startTransport(String id, List knownNodes, Version version) { - return RemoteClusterConnectionTests.startTransport(id, knownNodes, version, threadPool); + return startTransport(id, knownNodes, version, Settings.EMPTY); + } + + private MockTransportService startTransport( + final String id, + final List knownNodes, + final Version version, + final Settings settings) { + return RemoteClusterConnectionTests.startTransport( + id, knownNodes, version, threadPool, settings); } public void testSettingsAreRegistered() { @@ -247,4 +258,81 @@ public class RemoteClusterServiceTests extends ESTestCase { assertEquals(new MatchAllQueryBuilder(), remoteAliases.get("bar_id").getQueryBuilder()); } } + + public void testRemoteNodeAttribute() throws IOException, InterruptedException { + final Settings settings = + Settings.builder().put("search.remote.node.attr", "gateway").build(); + final List knownNodes = new CopyOnWriteArrayList<>(); + final Settings gateway = Settings.builder().put("node.attr.gateway", true).build(); + try (MockTransportService c1N1 = + startTransport("cluster_1_node_1", knownNodes, Version.CURRENT); + MockTransportService c1N2 = + startTransport("cluster_1_node_2", knownNodes, Version.CURRENT, gateway); + MockTransportService c2N1 = + startTransport("cluster_2_node_1", knownNodes, Version.CURRENT); + MockTransportService c2N2 = + startTransport("cluster_2_node_2", knownNodes, Version.CURRENT, gateway)) { + final DiscoveryNode c1N1Node = c1N1.getLocalDiscoNode(); + final DiscoveryNode c1N2Node = c1N2.getLocalDiscoNode(); + final DiscoveryNode c2N1Node = c2N1.getLocalDiscoNode(); + final DiscoveryNode c2N2Node = c2N2.getLocalDiscoNode(); + knownNodes.add(c1N1Node); + knownNodes.add(c1N2Node); + knownNodes.add(c2N1Node); + knownNodes.add(c2N2Node); + Collections.shuffle(knownNodes, random()); + + try (MockTransportService transportService = MockTransportService.createNewService( + settings, + Version.CURRENT, + threadPool, + null)) { + transportService.start(); + transportService.acceptIncomingRequests(); + final Settings.Builder builder = Settings.builder(); + builder.putArray( + "search.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); + builder.putArray( + "search.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); + try (RemoteClusterService service = + new RemoteClusterService(settings, transportService)) { + assertFalse(service.isCrossClusterSearchEnabled()); + service.initializeRemoteClusters(); + assertFalse(service.isCrossClusterSearchEnabled()); + + final InetSocketAddress c1N1Address = c1N1Node.getAddress().address(); + final InetSocketAddress c1N2Address = c1N2Node.getAddress().address(); + final InetSocketAddress c2N1Address = 
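The assertions later in testRemoteNodeAttribute (only the *_node_2 transports, which were started with node.attr.gateway=true, end up connected) suggest the semantics under test: when search.remote.node.attr is set, only remote nodes carrying that boolean attribute are eligible for cross-cluster connections. A small self-contained sketch of that filtering rule, with Node as an illustrative stand-in for DiscoveryNode:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

final class Node {
    final String id;
    final Map<String, String> attributes;

    Node(String id, Map<String, String> attributes) {
        this.id = id;
        this.attributes = attributes;
    }
}

class NodeAttrFilterDemo {
    // With no attribute configured every node qualifies; otherwise only nodes whose
    // attribute value parses as true may be used for remote-cluster connections.
    static Predicate<Node> nodePredicate(String requiredAttr) {
        if (requiredAttr == null) {
            return node -> true;
        }
        return node -> Boolean.parseBoolean(node.attributes.getOrDefault(requiredAttr, "false"));
    }

    public static void main(String[] args) {
        List<Node> cluster1 = Arrays.asList(
            new Node("cluster_1_node_1", Collections.emptyMap()),
            new Node("cluster_1_node_2", Collections.singletonMap("gateway", "true")));
        for (Node node : cluster1) {
            if (nodePredicate("gateway").test(node)) {
                System.out.println(node.id); // only cluster_1_node_2
            }
        }
    }
}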
c2N1Node.getAddress().address(); + final InetSocketAddress c2N2Address = c2N2Node.getAddress().address(); + + final CountDownLatch firstLatch = new CountDownLatch(1); + service.updateRemoteCluster( + "cluster_1", + Arrays.asList(c1N1Address, c1N2Address), + connectionListener(firstLatch)); + firstLatch.await(); + + final CountDownLatch secondLatch = new CountDownLatch(1); + service.updateRemoteCluster( + "cluster_2", + Arrays.asList(c2N1Address, c2N2Address), + connectionListener(secondLatch)); + secondLatch.await(); + + assertTrue(service.isCrossClusterSearchEnabled()); + assertTrue(service.isRemoteClusterRegistered("cluster_1")); + assertFalse(service.isRemoteNodeConnected("cluster_1", c1N1Node)); + assertTrue(service.isRemoteNodeConnected("cluster_1", c1N2Node)); + assertTrue(service.isRemoteClusterRegistered("cluster_2")); + assertFalse(service.isRemoteNodeConnected("cluster_2", c2N1Node)); + assertTrue(service.isRemoteNodeConnected("cluster_2", c2N2Node)); + } + } + } + } + + private ActionListener connectionListener(final CountDownLatch latch) { + return ActionListener.wrap(x -> latch.countDown(), x -> fail()); + } + } diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 53e4eb59ae5..4813dc8ae7d 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchPhaseResult; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -111,13 +110,14 @@ public class SearchAsyncActionTests extends ESTestCase { TestSearchResponse response = new TestSearchResponse(); @Override - protected void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, ActionListener listener) { + protected void executePhaseOnShard(ShardIterator shardIt, ShardRouting shard, SearchActionListener + listener) { assertTrue("shard: " + shard.shardId() + " has been queried twice", response.queried.add(shard.shardId())); Transport.Connection connection = getConnection(shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult(contextIdGenerator.incrementAndGet(), connection.getNode()); Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> new HashSet<>()); - ids.add(testSearchPhaseResult.id); + ids.add(testSearchPhaseResult.getRequestId()); if (randomBoolean()) { listener.onResponse(testSearchPhaseResult); } else { @@ -132,8 +132,8 @@ public class SearchAsyncActionTests extends ESTestCase { public void run() throws IOException { for (int i = 0; i < results.getNumShards(); i++) { TestSearchPhaseResult result = results.results.get(i); - assertEquals(result.node.getId(), result.shardTarget().getNodeId()); - sendReleaseSearchContext(result.id(), new MockConnection(result.node)); + assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); + sendReleaseSearchContext(result.getRequestId(), new MockConnection(result.node)); } responseListener.onResponse(response); latch.countDown(); @@ -193,32 +193,14 @@ public class SearchAsyncActionTests extends ESTestCase { public final Set queried = new 
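The connectionListener(latch) helper above is a compact idiom worth noting: the test turns an async callback into something it can block on by counting down a latch on success and failing the test on error. A self-contained sketch of the same idiom, with Listener standing in for Elasticsearch's ActionListener:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

interface Listener<T> {
    void onResponse(T response);

    void onFailure(Exception e);

    // Analogous to ActionListener.wrap(onResponse, onFailure).
    static <T> Listener<T> wrap(Consumer<T> onResponse, Consumer<Exception> onFailure) {
        return new Listener<T>() {
            @Override
            public void onResponse(T response) {
                onResponse.accept(response);
            }

            @Override
            public void onFailure(Exception e) {
                onFailure.accept(e);
            }
        };
    }
}

class LatchDemo {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        Listener<Void> listener = Listener.wrap(
            x -> latch.countDown(), // success: release the waiting test thread
            e -> { throw new AssertionError("connection failed", e); });
        // Simulate the async connect completing on another thread.
        new Thread(() -> listener.onResponse(null)).start();
        if (latch.await(5, TimeUnit.SECONDS) == false) {
            throw new AssertionError("timed out waiting for connection");
        }
    }
}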
HashSet<>(); } - public static class TestSearchPhaseResult implements SearchPhaseResult { - final long id; + public static class TestSearchPhaseResult extends SearchPhaseResult { final DiscoveryNode node; - SearchShardTarget shardTarget; public TestSearchPhaseResult(long id, DiscoveryNode node) { - this.id = id; + this.requestId = id; this.node = node; } - @Override - public long id() { - return id; - } - - @Override - public SearchShardTarget shardTarget() { - return this.shardTarget; - } - - @Override - public void shardTarget(SearchShardTarget shardTarget) { - this.shardTarget = shardTarget; - - } - @Override public void readFrom(StreamInput in) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 36756aba946..632a160d8ee 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.Index; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -38,7 +39,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.test.ESTestCase; @@ -70,12 +70,12 @@ public class SearchPhaseControllerTests extends ESTestCase { public void testSort() throws Exception { List suggestions = new ArrayList<>(); for (int i = 0; i < randomIntBetween(1, 5); i++) { - suggestions.add(new CompletionSuggestion(randomAsciiOfLength(randomIntBetween(1, 5)), randomIntBetween(1, 20))); + suggestions.add(new CompletionSuggestion(randomAlphaOfLength(randomIntBetween(1, 5)), randomIntBetween(1, 20))); } int nShards = randomIntBetween(1, 20); int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2); - AtomicArray results = generateQueryResults(nShards, suggestions, queryResultSize, false); - ScoreDoc[] sortedDocs = searchPhaseController.sortDocs(true, results); + AtomicArray results = generateQueryResults(nShards, suggestions, queryResultSize, false); + ScoreDoc[] sortedDocs = searchPhaseController.sortDocs(true, results.asList(), nShards); int accumulatedLength = Math.min(queryResultSize, getTotalQueryHits(results)); for (Suggest.Suggestion suggestion : reducedSuggest(results)) { int suggestionSize = suggestion.getEntries().get(0).getOptions().size(); @@ -87,23 +87,23 @@ public class SearchPhaseControllerTests extends ESTestCase { public void testSortIsIdempotent() throws IOException { int nShards = randomIntBetween(1, 20); int queryResultSize = randomBoolean() ? 
0 : randomIntBetween(1, nShards * 2); - AtomicArray results = generateQueryResults(nShards, Collections.emptyList(), queryResultSize, + AtomicArray results = generateQueryResults(nShards, Collections.emptyList(), queryResultSize, randomBoolean() || true); boolean ignoreFrom = randomBoolean(); - ScoreDoc[] sortedDocs = searchPhaseController.sortDocs(ignoreFrom, results); + ScoreDoc[] sortedDocs = searchPhaseController.sortDocs(ignoreFrom, results.asList(), nShards); - ScoreDoc[] sortedDocs2 = searchPhaseController.sortDocs(ignoreFrom, results); + ScoreDoc[] sortedDocs2 = searchPhaseController.sortDocs(ignoreFrom, results.asList(), nShards); assertArrayEquals(sortedDocs, sortedDocs2); } public void testMerge() throws IOException { List suggestions = new ArrayList<>(); for (int i = 0; i < randomIntBetween(1, 5); i++) { - suggestions.add(new CompletionSuggestion(randomAsciiOfLength(randomIntBetween(1, 5)), randomIntBetween(1, 20))); + suggestions.add(new CompletionSuggestion(randomAlphaOfLength(randomIntBetween(1, 5)), randomIntBetween(1, 20))); } int nShards = randomIntBetween(1, 20); int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2); - AtomicArray queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false); + AtomicArray queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false); // calculate offsets and score doc array List mergedScoreDocs = new ArrayList<>(); @@ -119,9 +119,10 @@ public class SearchPhaseControllerTests extends ESTestCase { } } ScoreDoc[] sortedDocs = mergedScoreDocs.toArray(new ScoreDoc[mergedScoreDocs.size()]); + AtomicArray searchPhaseResultAtomicArray = generateFetchResults(nShards, mergedSearchDocs, mergedSuggest); InternalSearchResponse mergedResponse = searchPhaseController.merge(true, sortedDocs, searchPhaseController.reducedQueryPhase(queryResults.asList()), - generateFetchResults(nShards, mergedSearchDocs, mergedSuggest)); + searchPhaseResultAtomicArray.asList(), searchPhaseResultAtomicArray::get); assertThat(mergedResponse.hits().getHits().length, equalTo(mergedSearchDocs.length)); Suggest suggestResult = mergedResponse.suggest(); for (Suggest.Suggestion suggestion : mergedSuggest) { @@ -138,10 +139,10 @@ public class SearchPhaseControllerTests extends ESTestCase { } } - private AtomicArray generateQueryResults(int nShards, - List suggestions, - int searchHitsSize, boolean useConstantScore) { - AtomicArray queryResults = new AtomicArray<>(nShards); + private AtomicArray generateQueryResults(int nShards, + List suggestions, + int searchHitsSize, boolean useConstantScore) { + AtomicArray queryResults = new AtomicArray<>(nShards); for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { QuerySearchResult querySearchResult = new QuerySearchResult(shardIndex, new SearchShardTarget("", new Index("", ""), shardIndex)); @@ -181,23 +182,24 @@ public class SearchPhaseControllerTests extends ESTestCase { querySearchResult.topDocs(topDocs, null); querySearchResult.size(searchHitsSize); querySearchResult.suggest(new Suggest(new ArrayList<>(shardSuggestion))); + querySearchResult.setShardIndex(shardIndex); queryResults.set(shardIndex, querySearchResult); } return queryResults; } - private int getTotalQueryHits(AtomicArray results) { + private int getTotalQueryHits(AtomicArray results) { int resultCount = 0; - for (AtomicArray.Entry shardResult : results.asList()) { - resultCount += shardResult.value.queryResult().topDocs().totalHits; + for (SearchPhaseResult shardResult : results.asList()) { + 
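sortDocs now takes the plain result list plus the shard count, and testSortIsIdempotent pins down the property that matters for a merge that may run more than once: given the same inputs, the merged order must be identical. A self-contained sketch of what such a cross-shard top-N merge does conceptually; the real implementation delegates to Lucene's TopDocs.merge, so ShardDoc and the hand-rolled sort below are illustrative only:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

final class ShardDoc {
    final int shardIndex;
    final int doc;
    final float score;

    ShardDoc(int shardIndex, int doc, float score) {
        this.shardIndex = shardIndex;
        this.doc = doc;
        this.score = score;
    }
}

class SortDocsDemo {
    // Merge per-shard hit lists into one top-N list. Deterministic tie-breaking on
    // shard index and doc id is what makes the merge idempotent, the property
    // testSortIsIdempotent checks.
    static List<ShardDoc> sortDocs(List<List<ShardDoc>> perShardHits, int topN) {
        List<ShardDoc> all = new ArrayList<>();
        perShardHits.forEach(all::addAll);
        all.sort(Comparator.comparingDouble((ShardDoc d) -> d.score).reversed()
            .thenComparingInt(d -> d.shardIndex)
            .thenComparingInt(d -> d.doc));
        return all.subList(0, Math.min(topN, all.size()));
    }

    public static void main(String[] args) {
        List<List<ShardDoc>> shards = Arrays.asList(
            Arrays.asList(new ShardDoc(0, 42, 1.0F)),
            Arrays.asList(new ShardDoc(1, 84, 2.0F)));
        for (ShardDoc d : sortDocs(shards, 2)) {
            System.out.println("shard " + d.shardIndex + " doc " + d.doc + " score " + d.score);
        }
    }
}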
resultCount += shardResult.queryResult().topDocs().totalHits; } return resultCount; } - private Suggest reducedSuggest(AtomicArray results) { + private Suggest reducedSuggest(AtomicArray results) { Map>> groupedSuggestion = new HashMap<>(); - for (AtomicArray.Entry entry : results.asList()) { - for (Suggest.Suggestion suggestion : entry.value.queryResult().suggest()) { + for (SearchPhaseResult entry : results.asList()) { + for (Suggest.Suggestion suggestion : entry.queryResult().suggest()) { List> suggests = groupedSuggestion.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>()); suggests.add((Suggest.Suggestion) suggestion); @@ -207,18 +209,18 @@ public class SearchPhaseControllerTests extends ESTestCase { .collect(Collectors.toList())); } - private ScoreDoc[] getTopShardDocs(AtomicArray results) throws IOException { - List> resultList = results.asList(); + private ScoreDoc[] getTopShardDocs(AtomicArray results) throws IOException { + List resultList = results.asList(); TopDocs[] shardTopDocs = new TopDocs[resultList.size()]; for (int i = 0; i < resultList.size(); i++) { - shardTopDocs[i] = resultList.get(i).value.queryResult().topDocs(); + shardTopDocs[i] = resultList.get(i).queryResult().topDocs(); } int topN = Math.min(results.get(0).queryResult().size(), getTotalQueryHits(results)); return TopDocs.merge(topN, shardTopDocs).scoreDocs; } - private AtomicArray generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) { - AtomicArray fetchResults = new AtomicArray<>(nShards); + private AtomicArray generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) { + AtomicArray fetchResults = new AtomicArray<>(nShards); for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { float maxScore = -1F; SearchShardTarget shardTarget = new SearchShardTarget("", new Index("", ""), shardIndex); @@ -257,27 +259,30 @@ public class SearchPhaseControllerTests extends ESTestCase { SearchRequest request = new SearchRequest(); request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo"))); request.setBatchedReduceSize(bufferSize); - InitialSearchPhase.SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(request, 3); + InitialSearchPhase.SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(request, 3); QuerySearchResult result = new QuerySearchResult(0, new SearchShardTarget("node", new Index("a", "b"), 0)); result.topDocs(new TopDocs(0, new ScoreDoc[0], 0.0F), new DocValueFormat[0]); InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 1.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); - consumer.consumeResult(0, result); + result.setShardIndex(0); + consumer.consumeResult(result); result = new QuerySearchResult(1, new SearchShardTarget("node", new Index("a", "b"), 0)); result.topDocs(new TopDocs(0, new ScoreDoc[0], 0.0F), new DocValueFormat[0]); aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 3.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); - consumer.consumeResult(2, result); + result.setShardIndex(2); + consumer.consumeResult(result); result = new QuerySearchResult(1, new SearchShardTarget("node", new Index("a", "b"), 0)); result.topDocs(new TopDocs(0, new ScoreDoc[0], 0.0F), new DocValueFormat[0]); aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 2.0D, DocValueFormat.RAW, 
Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); - consumer.consumeResult(1, result); + result.setShardIndex(1); + consumer.consumeResult(result); int numTotalReducePhases = 1; if (bufferSize == 2) { assertThat(consumer, instanceOf(SearchPhaseController.QueryPhaseResultConsumer.class)); @@ -301,7 +306,7 @@ public class SearchPhaseControllerTests extends ESTestCase { SearchRequest request = new SearchRequest(); request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo"))); request.setBatchedReduceSize(bufferSize); - InitialSearchPhase.SearchPhaseResults consumer = + InitialSearchPhase.SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(request, expectedNumResults); AtomicInteger max = new AtomicInteger(); CountDownLatch latch = new CountDownLatch(expectedNumResults); @@ -315,7 +320,8 @@ public class SearchPhaseControllerTests extends ESTestCase { InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", (double) number, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); - consumer.consumeResult(id, result); + result.setShardIndex(id); + consumer.consumeResult(result); latch.countDown(); }); @@ -337,7 +343,7 @@ public class SearchPhaseControllerTests extends ESTestCase { request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo"))); } request.setBatchedReduceSize(bufferSize); - InitialSearchPhase.SearchPhaseResults consumer + InitialSearchPhase.SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(request, expectedNumResults); if (hasAggs && expectedNumResults > bufferSize) { assertThat("expectedNumResults: " + expectedNumResults + " bufferSize: " + bufferSize, @@ -354,7 +360,7 @@ public class SearchPhaseControllerTests extends ESTestCase { for (int iters = 0; iters < maxIters; iters++) { TopDocs[] topDocs = new TopDocs[randomIntBetween(2, 100)]; int numShards = topDocs.length; - AtomicArray resultProviderAtomicArray = generateQueryResults(numShards, Collections.emptyList(), + AtomicArray resultProviderAtomicArray = generateQueryResults(numShards, Collections.emptyList(), 2, randomBoolean()); if (randomBoolean()) { int maxNull = randomIntBetween(1, topDocs.length - 1); diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java index e483c718ab8..9773d7320d0 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java @@ -65,7 +65,7 @@ public class SearchScrollRequestTests extends ESTestCase { } public static SearchScrollRequest createSearchScrollRequest() { - SearchScrollRequest searchScrollRequest = new SearchScrollRequest(randomAsciiOfLengthBetween(3, 10)); + SearchScrollRequest searchScrollRequest = new SearchScrollRequest(randomAlphaOfLengthBetween(3, 10)); searchScrollRequest.scroll(randomPositiveTimeValue()); return searchScrollRequest; } diff --git a/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java b/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java index 94a7e1d44bb..96afbb276d5 100644 --- a/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java @@ -35,11 +35,11 @@ import static 
org.elasticsearch.common.xcontent.XContentHelper.toXContent; public class ShardSearchFailureTests extends ESTestCase { public static ShardSearchFailure createTestItem() { - String randomMessage = randomAsciiOfLengthBetween(3, 20); + String randomMessage = randomAlphaOfLengthBetween(3, 20); Exception ex = new ParsingException(0, 0, randomMessage , new IllegalArgumentException("some bad argument")); - String nodeId = randomAsciiOfLengthBetween(5, 10); - String indexName = randomAsciiOfLengthBetween(5, 10); - String indexUuid = randomAsciiOfLengthBetween(5, 10); + String nodeId = randomAlphaOfLengthBetween(5, 10); + String indexName = randomAlphaOfLengthBetween(5, 10); + String indexUuid = randomAlphaOfLengthBetween(5, 10); int shardId = randomInt(); return new ShardSearchFailure(ex, new SearchShardTarget(nodeId, new ShardId(new Index(indexName, indexUuid), shardId))); diff --git a/core/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java b/core/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java index 598a672fb2d..4fb03bf393b 100644 --- a/core/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java @@ -62,7 +62,7 @@ public class ActiveShardCountTests extends ESTestCase { assertSame(ActiveShardCount.parseString("0"), ActiveShardCount.NONE); int value = randomIntBetween(1, 50); assertEquals(ActiveShardCount.parseString(value + ""), ActiveShardCount.from(value)); - expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString(randomAsciiOfLengthBetween(4, 8))); + expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString(randomAlphaOfLengthBetween(4, 8))); expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString("-1")); // magic numbers not exposed through API expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString("-2")); expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString(randomIntBetween(-10, -3) + "")); @@ -200,7 +200,7 @@ public class ActiveShardCountTests extends ESTestCase { final IndexShardRoutingTable shardRoutingTable = shardEntry.value; for (ShardRouting shardRouting : shardRoutingTable.getShards()) { if (shardRouting.primary()) { - shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + shardRouting = shardRouting.initialize(randomAlphaOfLength(8), null, shardRouting.getExpectedShardSize()) .moveToStarted(); } newIndexRoutingTable.addShard(shardRouting); @@ -224,7 +224,7 @@ public class ActiveShardCountTests extends ESTestCase { assertTrue(shardRouting.active()); } else { if (numToStart > 0) { - shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + shardRouting = shardRouting.initialize(randomAlphaOfLength(8), null, shardRouting.getExpectedShardSize()) .moveToStarted(); numToStart--; } @@ -250,7 +250,7 @@ public class ActiveShardCountTests extends ESTestCase { } else { if (shardRouting.active() == false) { if (numToStart > 0) { - shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + shardRouting = shardRouting.initialize(randomAlphaOfLength(8), null, shardRouting.getExpectedShardSize()) .moveToStarted(); numToStart--; } @@ -276,7 +276,7 @@ public class ActiveShardCountTests extends ESTestCase { assertTrue(shardRouting.active()); } else { if (shardRouting.active() == 
false) { - shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + shardRouting = shardRouting.initialize(randomAlphaOfLength(8), null, shardRouting.getExpectedShardSize()) .moveToStarted(); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java b/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java index 189fd1de59c..159be84de07 100644 --- a/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java @@ -85,24 +85,24 @@ public class AutoCreateIndexTests extends ESTestCase { Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> - autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState())); + autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState())); assertEquals("no such index and [action.auto_create_index] is [false]", e.getMessage()); } public void testAutoCreationEnabled() { Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); - assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(true)); + assertThat(autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState()), equalTo(true)); } public void testDefaultAutoCreation() { AutoCreateIndex autoCreateIndex = newAutoCreateIndex(Settings.EMPTY); - assertThat(autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState()), equalTo(true)); + assertThat(autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState()), equalTo(true)); } public void testExistingIndex() { Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, false, - randomAsciiOfLengthBetween(7, 10))).build(); + randomAlphaOfLengthBetween(7, 10))).build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); assertThat(autoCreateIndex.shouldAutoCreate(randomFrom("index1", "index2", "index3"), buildClusterState("index1", "index2", "index3")), equalTo(false)); @@ -110,11 +110,11 @@ public class AutoCreateIndexTests extends ESTestCase { public void testDynamicMappingDisabled() { Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, - randomAsciiOfLengthBetween(1, 10))) + randomAlphaOfLengthBetween(1, 10))) .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false).build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> - autoCreateIndex.shouldAutoCreate(randomAsciiOfLengthBetween(1, 10), buildClusterState())); + autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState())); assertEquals("no such index and [index.mapper.dynamic] is [false]", e.getMessage()); } @@ -123,18 +123,18 @@ public class AutoCreateIndexTests extends ESTestCase { .build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); - 
assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(true)); - expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAsciiOfLengthBetween(1, 5)); + assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAlphaOfLengthBetween(1, 5), clusterState), equalTo(true)); + expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5)); } public void testAutoCreationPatternDisabled() { Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "-index*").build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); - expectForbidden(clusterState, autoCreateIndex, "index" + randomAsciiOfLengthBetween(1, 5), "-index*"); + expectForbidden(clusterState, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5), "-index*"); /* When patterns are specified, even if the are all negative, the default is can't create. So a pure negative pattern is the same * as false, really. */ - expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAsciiOfLengthBetween(1, 5)); + expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5)); } public void testAutoCreationMultiplePatternsWithWildcards() { @@ -142,9 +142,9 @@ public class AutoCreateIndexTests extends ESTestCase { randomFrom("+test*,-index*", "test*,-index*")).build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); - expectForbidden(clusterState, autoCreateIndex, "index" + randomAsciiOfLengthBetween(1, 5), "-index*"); - assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAsciiOfLengthBetween(1, 5), clusterState), equalTo(true)); - expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAsciiOfLengthBetween(1, 5)); + expectForbidden(clusterState, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5), "-index*"); + assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAlphaOfLengthBetween(1, 5), clusterState), equalTo(true)); + expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5)); } public void testAutoCreationMultiplePatternsNoWildcards() { @@ -152,9 +152,9 @@ public class AutoCreateIndexTests extends ESTestCase { AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true)); - expectNotMatch(clusterState, autoCreateIndex, "index" + randomAsciiOfLengthBetween(1, 5)); - expectNotMatch(clusterState, autoCreateIndex, "test" + randomAsciiOfLengthBetween(2, 5)); - expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAsciiOfLengthBetween(1, 5)); + expectNotMatch(clusterState, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5)); + expectNotMatch(clusterState, autoCreateIndex, "test" + randomAlphaOfLengthBetween(2, 5)); + expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5)); } public void testAutoCreationMultipleIndexNames() { @@ -163,7 +163,7 @@ public class AutoCreateIndexTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(new 
ClusterName("test")).metaData(MetaData.builder()).build(); assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true)); assertThat(autoCreateIndex.shouldAutoCreate("test2", clusterState), equalTo(true)); - expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAsciiOfLengthBetween(1, 5)); + expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5)); } public void testAutoCreationConflictingPatternsFirstWins() { @@ -173,7 +173,7 @@ public class AutoCreateIndexTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder()).build(); assertThat(autoCreateIndex.shouldAutoCreate("test1", clusterState), equalTo(true)); expectForbidden(clusterState, autoCreateIndex, "test2", "-test2"); - expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAsciiOfLengthBetween(1, 5)); + expectNotMatch(clusterState, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5)); } public void testUpdate() { diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 230812bec88..aa61c2569d8 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -41,8 +41,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Function; -import java.util.stream.IntStream; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; @@ -69,7 +67,7 @@ public class TransportActionFilterChainTests extends ESTestCase { filters.add(new RequestTestFilter(order, randomFrom(RequestOperation.values()))); } - String actionName = randomAsciiOfLength(randomInt(30)); + String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override @@ -153,7 +151,7 @@ public class TransportActionFilterChainTests extends ESTestCase { Set filters = new HashSet<>(); filters.add(testFilter); - String actionName = randomAsciiOfLength(randomInt(30)); + String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction(Settings.EMPTY, actionName, null, actionFilters, null, new TaskManager(Settings.EMPTY)) { @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index de3af56a120..c3ca62616fd 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -113,9 +113,9 @@ public class TransportNodesActionTests extends ESTestCase { nodeResponses.add(new OtherNodeResponse()); List failures = mockList( () -> new FailedNodeException( - randomAsciiOfLength(8), - randomAsciiOfLength(8), - new IllegalStateException(randomAsciiOfLength(8))), + 
randomAlphaOfLength(8), + randomAlphaOfLength(8), + new IllegalStateException(randomAlphaOfLength(8))), randomIntBetween(0, 2)); List allResponses = new ArrayList<>(expectedNodeResponses); @@ -192,7 +192,7 @@ public class TransportNodesActionTests extends ESTestCase { Map attributes = new HashMap<>(); Set roles = new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))); if (frequently()) { - attributes.put("custom", randomBoolean() ? "match" : randomAsciiOfLengthBetween(3, 5)); + attributes.put("custom", randomBoolean() ? "match" : randomAlphaOfLengthBetween(3, 5)); } final DiscoveryNode node = newNode(i, attributes, roles); discoBuilder = discoBuilder.add(node); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java index 459bafd3af2..7447e9fb559 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java @@ -175,7 +175,7 @@ public class ReplicationOperationTests extends ESTestCase { } // add in-sync allocation id that doesn't have a corresponding routing entry state = ClusterState.builder(state).metaData(MetaData.builder(state.metaData()).put(IndexMetaData.builder(indexMetaData) - .putInSyncAllocationIds(0, Sets.union(indexMetaData.inSyncAllocationIds(0), Sets.newHashSet(randomAsciiOfLength(10)))))) + .putInSyncAllocationIds(0, Sets.union(indexMetaData.inSyncAllocationIds(0), Sets.newHashSet(randomAlphaOfLength(10)))))) .build(); final Set expectedReplicas = getExpectedReplicas(shardId, state); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 1bd94370470..abe0e9977dd 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -64,7 +64,6 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.cluster.ClusterStateChanges; -import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; @@ -626,7 +625,7 @@ public class TransportReplicationActionTests extends ESTestCase { assertThat(captures, arrayWithSize(1)); if (randomBoolean()) { final TransportReplicationAction.ReplicaResponse response = - new TransportReplicationAction.ReplicaResponse(randomAsciiOfLength(10), randomLong()); + new TransportReplicationAction.ReplicaResponse(randomAlphaOfLength(10), randomLong()); transport.handleResponse(captures[0].requestId, response); assertTrue(listener.isDone()); assertThat(listener.get(), equalTo(response)); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 781059fd859..3d57c27e373 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ 
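The long run of mechanical renames above (randomAsciiOfLength* to randomAlphaOfLength*) does not change behavior: these ESTestCase helpers produce strings of ASCII letters, and the new name states that plainly. A stand-alone sketch of the assumed semantics; the real helpers live in the randomized-testing infrastructure behind ESTestCase, not here:

import java.util.Random;

final class RandomStrings {
    private static final String ALPHABET =
        "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";

    // Draw a string of random ASCII letters with a length in [min, max].
    static String randomAlphaOfLengthBetween(Random random, int min, int max) {
        int length = min + random.nextInt(max - min + 1);
        StringBuilder sb = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            sb.append(ALPHABET.charAt(random.nextInt(ALPHABET.length())));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(randomAlphaOfLengthBetween(new Random(42L), 3, 10));
    }
}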
b/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -20,14 +20,11 @@ package org.elasticsearch.action.support.replication; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.WriteResponse; -import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; -import org.elasticsearch.action.support.replication.ReplicationOperation; import org.elasticsearch.action.support.replication.ReplicationOperation.ReplicaResponse; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.cluster.ClusterState; @@ -56,7 +53,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; @@ -290,7 +286,7 @@ public class TransportWriteActionTests extends ESTestCase { assertThat(captures, arrayWithSize(1)); if (randomBoolean()) { final TransportReplicationAction.ReplicaResponse response = - new TransportReplicationAction.ReplicaResponse(randomAsciiOfLength(10), randomLong()); + new TransportReplicationAction.ReplicaResponse(randomAlphaOfLength(10), randomLong()); transport.handleResponse(captures[0].requestId, response); assertTrue(listener.isDone()); assertThat(listener.get(), equalTo(response)); diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 4b11697c16d..4958e1c78aa 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -21,10 +21,12 @@ package org.elasticsearch.action.update; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -43,16 +45,20 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; import org.elasticsearch.watcher.ResourceWatcherService; +import org.junit.Before; import java.io.IOException; import java.nio.file.Path; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.function.Function; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static 
org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.script.MockScriptEngine.mockInlineScript; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; @@ -61,6 +67,66 @@ import static org.hamcrest.Matchers.notNullValue; public class UpdateRequestTests extends ESTestCase { + private UpdateHelper updateHelper; + + @Before + public void setUp() throws Exception { + super.setUp(); + final Path genericConfigFolder = createTempDir(); + final Settings baseSettings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder) + .build(); + final Environment environment = new Environment(baseSettings); + final Map, Object>> scripts = new HashMap<>(); + scripts.put( + "ctx._source.update_timestamp = ctx._now", + vars -> { + @SuppressWarnings("unchecked") + final Map ctx = (Map) vars.get("ctx"); + @SuppressWarnings("unchecked") + final Map source = (Map) ctx.get("_source"); + source.put("update_timestamp", ctx.get("_now")); + return null; + }); + scripts.put( + "ctx._timestamp = ctx._now", + vars -> { + @SuppressWarnings("unchecked") + final Map ctx = (Map) vars.get("ctx"); + ctx.put("_timestamp", ctx.get("_now")); + return null; + }); + scripts.put( + "ctx.op = delete", + vars -> { + @SuppressWarnings("unchecked") + final Map ctx = (Map) vars.get("ctx"); + ctx.put("op", "delete"); + return null; + }); + scripts.put("return", vars -> null); + final ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(emptyList()); + final MockScriptEngine engine = new MockScriptEngine("mock", scripts); + final ScriptEngineRegistry scriptEngineRegistry = + new ScriptEngineRegistry(singletonList(engine)); + + final ScriptSettings scriptSettings = + new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); + final ResourceWatcherService watcherService = + new ResourceWatcherService(baseSettings, null); + ScriptService scriptService = new ScriptService( + baseSettings, + environment, + watcherService, + scriptEngineRegistry, + scriptContextRegistry, + scriptSettings); + final Settings settings = settings(Version.CURRENT).build(); + + updateHelper = new UpdateHelper(settings, scriptService); + } + public void testFromXContent() throws Exception { UpdateRequest request = new UpdateRequest("test", "type", "1"); // simple script @@ -74,7 +140,7 @@ public class UpdateRequestTests extends ESTestCase { assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); Map params = script.getParams(); - assertThat(params, equalTo(Collections.emptyMap())); + assertThat(params, equalTo(emptyMap())); // simple verbose script request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() @@ -86,7 +152,7 @@ public class UpdateRequestTests extends ESTestCase { assertThat(script.getType(), equalTo(ScriptType.INLINE)); assertThat(script.getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); params = script.getParams(); - assertThat(params, equalTo(Collections.emptyMap())); + assertThat(params, equalTo(emptyMap())); // script with params request = new UpdateRequest("test", "type", "1"); @@ -258,39 +324,6 @@ public class UpdateRequestTests extends ESTestCase { } public void testNowInScript() throws IOException { - Path genericConfigFolder = createTempDir(); - Settings 
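The new @Before setUp() registers mock scripts in a map keyed by script source, each implemented as a function over the script's variables. A self-contained sketch of that dispatch pattern, mirroring the ctx._source.update_timestamp = ctx._now entry above (MockScriptEngine itself is not reproduced here):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

class MockScriptsDemo {
    public static void main(String[] args) {
        // Script source string -> behavior over the script variables.
        Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
        scripts.put("ctx._source.update_timestamp = ctx._now", vars -> {
            @SuppressWarnings("unchecked")
            Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx");
            @SuppressWarnings("unchecked")
            Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
            source.put("update_timestamp", ctx.get("_now"));
            return null;
        });

        // "Execute" the script: build the vars the way the update path would.
        Map<String, Object> source = new HashMap<>();
        Map<String, Object> ctx = new HashMap<>();
        ctx.put("_source", source);
        ctx.put("_now", 123456789L);
        Map<String, Object> vars = new HashMap<>();
        vars.put("ctx", ctx);
        scripts.get("ctx._source.update_timestamp = ctx._now").apply(vars);
        System.out.println(source.get("update_timestamp")); // 123456789
    }
}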
baseSettings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder) - .build(); - Environment environment = new Environment(baseSettings); - Map, Object>> scripts = new HashMap<>(); - scripts.put("ctx._source.update_timestamp = ctx._now", - (vars) -> { - Map vars2 = vars; - @SuppressWarnings("unchecked") - Map ctx = (Map) vars2.get("ctx"); - @SuppressWarnings("unchecked") - Map source = (Map) ctx.get("_source"); - source.put("update_timestamp", ctx.get("_now")); - return null;}); - scripts.put("ctx._timestamp = ctx._now", - (vars) -> { - @SuppressWarnings("unchecked") - Map ctx = (Map) vars.get("ctx"); - ctx.put("_timestamp", ctx.get("_now")); - return null;}); - ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); - ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new MockScriptEngine("mock", - scripts))); - - ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); - ScriptService scriptService = new ScriptService(baseSettings, environment, - new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings); - Settings settings = settings(Version.CURRENT).build(); - - UpdateHelper updateHelper = new UpdateHelper(settings, scriptService); - // We just upsert one document with now() using a script IndexRequest indexRequest = new IndexRequest("test", "type1", "2") .source(jsonBuilder().startObject().field("foo", "bar").endObject()); @@ -298,7 +331,7 @@ public class UpdateRequestTests extends ESTestCase { { UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2") .upsert(indexRequest) - .script(new Script(ScriptType.INLINE, "mock", "ctx._source.update_timestamp = ctx._now", Collections.emptyMap())) + .script(mockInlineScript("ctx._source.update_timestamp = ctx._now")) .scriptedUpsert(true); long nowInMillis = randomNonNegativeLong(); // We simulate that the document is not existing yet @@ -307,12 +340,12 @@ public class UpdateRequestTests extends ESTestCase { Streamable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); IndexRequest indexAction = (IndexRequest) action; - assertEquals(indexAction.sourceAsMap().get("update_timestamp"), nowInMillis); + assertEquals(nowInMillis, indexAction.sourceAsMap().get("update_timestamp")); } { UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2") .upsert(indexRequest) - .script(new Script(ScriptType.INLINE, "mock", "ctx._timestamp = ctx._now", Collections.emptyMap())) + .script(mockInlineScript("ctx._timestamp = ctx._now")) .scriptedUpsert(true); // We simulate that the document is not existing yet GetResult getResult = new GetResult("test", "type1", "2", 0, true, new BytesArray("{}"), null); @@ -322,6 +355,57 @@ public class UpdateRequestTests extends ESTestCase { } } + public void testIndexTimeout() { + final GetResult getResult = + new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null); + final UpdateRequest updateRequest = + new UpdateRequest("test", "type", "1") + .script(mockInlineScript("return")) + .timeout(randomTimeValue()); + runTimeoutTest(getResult, updateRequest); + } + + public void testDeleteTimeout() { + final GetResult getResult = + new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null); + final UpdateRequest updateRequest = + new 
+    public void testDeleteTimeout() {
+        final GetResult getResult =
+            new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null);
+        final UpdateRequest updateRequest =
+            new UpdateRequest("test", "type", "1")
+                .script(mockInlineScript("ctx.op = delete"))
+                .timeout(randomTimeValue());
+        runTimeoutTest(getResult, updateRequest);
+    }
+
+    public void testUpsertTimeout() throws IOException {
+        final boolean exists = randomBoolean();
+        final BytesReference source = exists ? new BytesArray("{\"f\":\"v\"}") : null;
+        final GetResult getResult = new GetResult("test", "type", "1", 0, exists, source, null);
+        final XContentBuilder sourceBuilder = jsonBuilder();
+        sourceBuilder.startObject();
+        {
+            sourceBuilder.field("f", "v");
+        }
+        sourceBuilder.endObject();
+        final IndexRequest upsert = new IndexRequest("test", "type", "1").source(sourceBuilder);
+        final UpdateRequest updateRequest =
+            new UpdateRequest("test", "type", "1")
+                .upsert(upsert)
+                .script(mockInlineScript("return"))
+                .timeout(randomTimeValue());
+        runTimeoutTest(getResult, updateRequest);
+    }
+
+    private void runTimeoutTest(final GetResult getResult, final UpdateRequest updateRequest) {
+        final UpdateHelper.Result result = updateHelper.prepare(
+            new ShardId("test", "", 0),
+            updateRequest,
+            getResult,
+            ESTestCase::randomNonNegativeLong);
+        final Streamable action = result.action();
+        assertThat(action, instanceOf(ReplicationRequest.class));
+        final ReplicationRequest request = (ReplicationRequest) action;
+        assertThat(request.timeout(), equalTo(updateRequest.timeout()));
+    }
+
     public void testToAndFromXContent() throws IOException {
         UpdateRequest updateRequest = new UpdateRequest();
         updateRequest.detectNoop(randomBoolean());
@@ -333,12 +417,12 @@ public class UpdateRequestTests extends ESTestCase {
             updateRequest.docAsUpsert(randomBoolean());
         } else {
             ScriptType scriptType = randomFrom(ScriptType.values());
-            String scriptLang = (scriptType != ScriptType.STORED) ? randomAsciiOfLength(10) : null;
-            String scriptIdOrCode = randomAsciiOfLength(10);
+            String scriptLang = (scriptType != ScriptType.STORED) ? randomAlphaOfLength(10) : null;
+            String scriptIdOrCode = randomAlphaOfLength(10);
             int nbScriptParams = randomIntBetween(0, 5);
             Map<String, Object> scriptParams = new HashMap<>(nbScriptParams);
             for (int i = 0; i < nbScriptParams; i++) {
-                scriptParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5));
+                scriptParams.put(randomAlphaOfLength(5), randomAlphaOfLength(5));
             }
             updateRequest.script(new Script(scriptType, scriptLang, scriptIdOrCode, scriptParams));
             updateRequest.scriptedUpsert(randomBoolean());
@@ -351,7 +435,7 @@ public class UpdateRequestTests extends ESTestCase {
         if (randomBoolean()) {
             String[] fields = new String[randomIntBetween(0, 5)];
             for (int i = 0; i < fields.length; i++) {
-                fields[i] = randomAsciiOfLength(5);
+                fields[i] = randomAlphaOfLength(5);
             }
             updateRequest.fields(fields);
         }
@@ -361,11 +445,11 @@ public class UpdateRequestTests extends ESTestCase {
         } else {
             String[] includes = new String[randomIntBetween(0, 5)];
             for (int i = 0; i < includes.length; i++) {
-                includes[i] = randomAsciiOfLength(5);
+                includes[i] = randomAlphaOfLength(5);
             }
             String[] excludes = new String[randomIntBetween(0, 5)];
             for (int i = 0; i < excludes.length; i++) {
-                excludes[i] = randomAsciiOfLength(5);
+                excludes[i] = randomAlphaOfLength(5);
             }
             if (randomBoolean()) {
                 updateRequest.fetchSource(includes, excludes);
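The three timeout tests above all funnel into runTimeoutTest, which pins down a single invariant: whichever concrete write UpdateHelper.prepare produces (index, delete, or upsert), it must carry the timeout set on the UpdateRequest rather than the ReplicationRequest default. A hedged usage sketch (the 5s literal is illustrative):

    UpdateRequest update = new UpdateRequest("test", "type", "1")
        .script(mockInlineScript("return"))
        .timeout(TimeValue.timeValueSeconds(5));
    // After updateHelper.prepare(...), the derived IndexRequest or DeleteRequest
    // must report timeout() == 5s instead of the 1m ReplicationRequest default.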
diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
index 2b73b1964ee..bd9e3551821 100644
--- a/core/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
+++ b/core/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java
@@ -130,7 +130,7 @@ public class UpdateResponseTests extends ESTestCase {
         String id = actualGetResult.getId();
         long version = actualGetResult.getVersion();
         DocWriteResponse.Result result = actualGetResult.isExists() ? DocWriteResponse.Result.UPDATED : DocWriteResponse.Result.NOT_FOUND;
-        String indexUUid = randomAsciiOfLength(5);
+        String indexUUid = randomAlphaOfLength(5);
         int shardId = randomIntBetween(0, 5);
 
         // We also want small number values (randomNonNegativeLong() tend to generate high numbers)
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java
index bd553cff6e1..0b1c04ac5cc 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java
@@ -44,6 +44,7 @@ import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.Matchers.hasToString;
+import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
@@ -98,7 +99,9 @@ public class BootstrapChecksTests extends ESTestCase {
         when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0]));
         when(boundTransportAddress.publishAddress()).thenReturn(publishAddress);
 
-        assertTrue(BootstrapChecks.enforceLimits(boundTransportAddress));
+        final String discoveryType = randomFrom("zen", "single-node");
+
+        assertEquals(BootstrapChecks.enforceLimits(boundTransportAddress, discoveryType), !"single-node".equals(discoveryType));
     }
 
     public void testEnforceLimitsWhenPublishingToNonLocalAddress() {
@@ -114,7 +117,9 @@ public class BootstrapChecksTests extends ESTestCase {
         when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0]));
         when(boundTransportAddress.publishAddress()).thenReturn(publishAddress);
 
-        assertTrue(BootstrapChecks.enforceLimits(boundTransportAddress));
+        final String discoveryType = randomFrom("zen", "single-node");
+
+        assertEquals(BootstrapChecks.enforceLimits(boundTransportAddress, discoveryType), !"single-node".equals(discoveryType));
     }
 
     public void testExceptionAggregation() {
@@ -483,7 +488,7 @@ public class BootstrapChecksTests extends ESTestCase {
             }
         };
 
-        final String command = randomAsciiOfLength(16);
+        final String command = randomAlphaOfLength(16);
         runMightForkTest(
             check,
             isSystemCallFilterInstalled,
@@ -511,7 +516,7 @@ public class BootstrapChecksTests extends ESTestCase {
             }
         };
 
-        final String command = randomAsciiOfLength(16);
+        final String command = randomAlphaOfLength(16);
         runMightForkTest(
             check,
             isSystemCallFilterInstalled,
@@ -560,12 +565,48 @@ public class BootstrapChecksTests extends ESTestCase {
         consumer.accept(e);
     }
 
+    public void testEarlyAccessCheck() throws NodeValidationException {
+        final AtomicReference<String> javaVersion
+            = new AtomicReference<>(randomFrom("1.8.0_152-ea", "9-ea"));
+        final BootstrapChecks.EarlyAccessCheck eaCheck = new BootstrapChecks.EarlyAccessCheck() {
+
+            @Override
+            String jvmVendor() {
+                return "Oracle Corporation";
+            }
+
+            @Override
+            String javaVersion() {
+                return javaVersion.get();
+            }
+
+        };
+
+        final List<BootstrapCheck> checks = Collections.singletonList(eaCheck);
+        final NodeValidationException e = expectThrows(
+            NodeValidationException.class,
+            () -> {
+                BootstrapChecks.check(true, checks, "testEarlyAccessCheck");
+            });
+        assertThat(
+            e.getMessage(),
+            containsString(
+                "Java version ["
+                    + javaVersion.get()
+                    + "] is an early-access build, only use release builds"));
+
+        // if not on an early-access build, nothing should happen
+        javaVersion.set(randomFrom("1.8.0_152", "9"));
+        BootstrapChecks.check(true, checks, "testEarlyAccessCheck");
+
+    }
+
     public void testG1GCCheck() throws NodeValidationException {
         final AtomicBoolean isG1GCEnabled = new AtomicBoolean(true);
         final AtomicBoolean isJava8 = new AtomicBoolean(true);
         final AtomicReference<String> jvmVersion =
             new AtomicReference<>(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(0, 39), randomIntBetween(1, 128)));
-        final BootstrapChecks.G1GCCheck oracleCheck = new BootstrapChecks.G1GCCheck() {
+        final BootstrapChecks.G1GCCheck g1GCCheck = new BootstrapChecks.G1GCCheck() {
 
             @Override
             String jvmVendor() {
@@ -592,7 +633,7 @@ public class BootstrapChecksTests extends ESTestCase {
 
         final NodeValidationException e = expectThrows(
             NodeValidationException.class,
-            () -> BootstrapChecks.check(true, Collections.singletonList(oracleCheck), "testG1GCCheck"));
+            () -> BootstrapChecks.check(true, Collections.singletonList(g1GCCheck), "testG1GCCheck"));
         assertThat(
             e.getMessage(),
             containsString(
@@ -600,18 +641,18 @@ public class BootstrapChecksTests extends ESTestCase {
 
         // if G1GC is disabled, nothing should happen
         isG1GCEnabled.set(false);
-        BootstrapChecks.check(true, Collections.singletonList(oracleCheck), "testG1GCCheck");
+        BootstrapChecks.check(true, Collections.singletonList(g1GCCheck), "testG1GCCheck");
 
         // if on or after update 40, nothing should happen independent of whether or not G1GC is enabled
         isG1GCEnabled.set(randomBoolean());
         jvmVersion.set(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(40, 112), randomIntBetween(1, 128)));
-        BootstrapChecks.check(true, Collections.singletonList(oracleCheck), "testG1GCCheck");
+        BootstrapChecks.check(true, Collections.singletonList(g1GCCheck), "testG1GCCheck");
 
         final BootstrapChecks.G1GCCheck nonOracleCheck = new BootstrapChecks.G1GCCheck() {
 
             @Override
             String jvmVendor() {
-                return randomAsciiOfLength(8);
+                return randomAlphaOfLength(8);
             }
 
         };
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java
index e4ff83e9b40..6e40153b467 100644
--- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java
+++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java
@@ -60,7 +60,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase {
             new IOError(new IOException("fatal")),
             new Error() {});
         final Thread thread = new Thread(() -> { throw error; });
-        final String name = randomAsciiOfLength(10);
+        final String name = randomAlphaOfLength(10);
         thread.setName(name);
         final AtomicBoolean halt = new AtomicBoolean();
         final AtomicInteger observedStatus = new AtomicInteger();
@@ -103,7 +103,7 @@ public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase {
     public void testUncaughtException() throws InterruptedException {
         final RuntimeException e = new RuntimeException("boom");
         final Thread thread = new Thread(() -> { throw e; });
-        final String name = randomAsciiOfLength(10);
+        final String name = randomAlphaOfLength(10);
         thread.setName(name);
         final AtomicReference<String> threadNameReference = new AtomicReference<>();
         final AtomicReference<Throwable> throwableReference = new AtomicReference<>();
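The fixtures in testEarlyAccessCheck suggest the shape of the production check: an Oracle JVM whose java.version carries the "-ea" suffix fails bootstrap. Roughly, as inferred from the test rather than copied from BootstrapChecks:

    // Inferred predicate; the real logic lives in BootstrapChecks.EarlyAccessCheck.
    static boolean isEarlyAccessBuild(String jvmVendor, String javaVersion) {
        return "Oracle Corporation".equals(jvmVendor) && javaVersion.endsWith("-ea");
    }
    // "1.8.0_152-ea" and "9-ea" trip the check; "1.8.0_152" and "9" pass.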
diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java
index d3a0e0b3e4e..43d866a47b7 100644
--- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java
+++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java
@@ -40,8 +40,6 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.junit.After;
-import org.junit.Before;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -122,7 +120,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
     }
 
     public void testOverrideHeader() throws Exception {
-        String key1Val = randomAsciiOfLength(5);
+        String key1Val = randomAlphaOfLength(5);
         Map<String, String> expected = new HashMap<>();
         expected.put("key1", key1Val);
         expected.put("key2", "val 2");
diff --git a/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java b/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java
index 35406ef1153..ee577d4df2f 100644
--- a/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java
+++ b/core/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java
@@ -33,7 +33,7 @@ import org.elasticsearch.test.client.NoOpClient;
 public class ParentTaskAssigningClientTests extends ESTestCase {
     public void testSetsParentId() {
-        TaskId[] parentTaskId = new TaskId[] {new TaskId(randomAsciiOfLength(3), randomLong())};
+        TaskId[] parentTaskId = new TaskId[] {new TaskId(randomAlphaOfLength(3), randomLong())};
 
         // This mock will do nothing but verify that parentTaskId is set on all requests sent to it.
         NoOpClient mock = new NoOpClient(getTestName()) {
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java
index 240add84464..b7ea45dd13a 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java
@@ -529,7 +529,7 @@ public class ClusterChangedEventTests extends ESTestCase {
             default: throw new AssertionError("Unhandled mode [" + deletionQuantity + "]");
         }
         final boolean changeClusterUUID = randomBoolean();
-        final List<Index> addedIndices = addIndices(numAdd, randomAsciiOfLengthBetween(5, 10));
+        final List<Index> addedIndices = addIndices(numAdd, randomAlphaOfLengthBetween(5, 10));
         List<Index> delIndices;
         if (changeClusterUUID) {
             delIndices = new ArrayList<>();
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoTests.java
index 1fcab355d65..35a7ea2aab8 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoTests.java
@@ -46,9 +46,9 @@ public class ClusterInfoTests extends ESTestCase {
         int numEntries = randomIntBetween(0, 128);
         ImmutableOpenMap.Builder<String, DiskUsage> builder = ImmutableOpenMap.builder(numEntries);
         for (int i = 0; i < numEntries; i++) {
-            String key = randomAsciiOfLength(32);
+            String key = randomAlphaOfLength(32);
             DiskUsage diskUsage = new DiskUsage(
-                randomAsciiOfLength(4), randomAsciiOfLength(4), randomAsciiOfLength(4),
+                randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4),
                 randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE)
             );
             builder.put(key, diskUsage);
@@ -60,7 +60,7 @@ public class ClusterInfoTests extends ESTestCase {
         int numEntries = randomIntBetween(0, 128);
         ImmutableOpenMap.Builder<String, Long> builder = ImmutableOpenMap.builder(numEntries);
         for (int i = 0; i < numEntries; i++) {
-            String key = randomAsciiOfLength(32);
+            String key = randomAlphaOfLength(32);
             long shardSize = randomIntBetween(0, Integer.MAX_VALUE);
             builder.put(key, shardSize);
         }
@@ -71,9 +71,9 @@ public class ClusterInfoTests extends ESTestCase {
         int numEntries = randomIntBetween(0, 128);
         ImmutableOpenMap.Builder<ShardRouting, String> builder = ImmutableOpenMap.builder(numEntries);
         for (int i = 0; i < numEntries; i++) {
-            ShardId shardId = new ShardId(randomAsciiOfLength(32), randomAsciiOfLength(32), randomIntBetween(0, Integer.MAX_VALUE));
+            ShardId shardId = new ShardId(randomAlphaOfLength(32), randomAlphaOfLength(32), randomIntBetween(0, Integer.MAX_VALUE));
             ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, null, randomBoolean(), ShardRoutingState.UNASSIGNED);
-            builder.put(shardRouting, randomAsciiOfLength(32));
+            builder.put(shardRouting, randomAlphaOfLength(32));
         }
         return builder.build();
     }
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
index c4241a2a989..51c66033ca9 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java
@@ -92,7 +92,7 @@ public class ClusterModuleTests extends ModuleTestCase {
     public void testRegisterClusterDynamicSetting() {
         SettingsModule module = new SettingsModule(Settings.EMPTY,
             Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope));
-        assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar"));
+        assertInstanceBinding(module, ClusterSettings.class, service -> service.isDynamicSetting("foo.bar"));
     }
 
     public void testRegisterIndexDynamicSettingDuplicate() {
@@ -107,7 +107,7 @@ public class ClusterModuleTests extends ModuleTestCase {
     public void testRegisterIndexDynamicSetting() {
         SettingsModule module = new SettingsModule(Settings.EMPTY,
             Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope));
-        assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("index.foo.bar"));
+        assertInstanceBinding(module, IndexScopedSettings.class, service -> service.isDynamicSetting("index.foo.bar"));
     }
 
     public void testRegisterAllocationDeciderDuplicate() {
diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
index 722f9590b56..29e3080bfe1 100644
--- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
@@ -207,7 +207,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
         }
         int additionalNodeCount = randomIntBetween(1, 20);
         for (int i = 0; i < additionalNodeCount; i++) {
-            nodes.add(new DiscoveryNode("node-" + randomAsciiOfLength(10), buildNewFakeTransportAddress(),
+            nodes.add(new DiscoveryNode("node-" + randomAlphaOfLength(10), buildNewFakeTransportAddress(),
                 emptyMap(), emptySet(), randomVersion(random())));
         }
         return ClusterState.builder(clusterState).nodes(nodes);
@@ -250,7 +250,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
             for (int j = 0; j < replicaCount; j++) {
                 UnassignedInfo unassignedInfo = null;
                 if (randomInt(5) == 1) {
-                    unassignedInfo = new UnassignedInfo(randomReason(), randomAsciiOfLength(10));
+                    unassignedInfo = new UnassignedInfo(randomReason(), randomAlphaOfLength(10));
                 }
                 if (availableNodeIds.isEmpty()) {
                     break;
@@ -420,7 +420,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
         }
         int settingsCount = randomInt(10);
         for (int i = 0; i < settingsCount; i++) {
-            builder.put(randomAsciiOfLength(10), randomAsciiOfLength(10));
+            builder.put(randomAlphaOfLength(10), randomAlphaOfLength(10));
         }
         return builder.build();
 
@@ -541,7 +541,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
                     if (randomBoolean() && part.getAliases().isEmpty() == false) {
                         builder.removeAlias(randomFrom(part.getAliases().keys().toArray(String.class)));
                     } else {
-                        builder.putAlias(AliasMetaData.builder(randomAsciiOfLength(10)));
+                        builder.putAlias(AliasMetaData.builder(randomAlphaOfLength(10)));
                     }
                     break;
                 case 2:
@@ -606,7 +606,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase {
             builder.filter(QueryBuilders.termQuery("test", randomRealisticUnicodeOfCodepointLength(10)).toString());
         }
         if (randomBoolean()) {
-            builder.routing(randomAsciiOfLength(10));
+            builder.routing(randomAlphaOfLength(10));
         }
         return builder.build();
     }
diff --git a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java
index 0770fb2c4dc..88a0cfa8bc3 100644
--- a/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/SpecificMasterNodesIT.java
@@ -33,17 +33,25 @@ import org.elasticsearch.test.ESIntegTestCase.Scope;
 import java.io.IOException;
 import java.util.Map;
 
+import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
-@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
+@ClusterScope(scope = Scope.TEST, numDataNodes = 0, autoMinMasterNodes = false)
 public class SpecificMasterNodesIT extends ESIntegTestCase {
 
+    @Override
+    protected Settings nodeSettings(int nodeOrdinal) {
+        return Settings.builder().put(super.nodeSettings(nodeOrdinal))
+            .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1).build();
+    }
+
     public void testSimpleOnlyMasterNodeElection() throws IOException {
         logger.info("--> start data node / non master node");
-        internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false).put("discovery.initial_state_timeout", "1s"));
+        internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false)
+            .put("discovery.initial_state_timeout", "1s"));
         try {
             assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("100ms").execute().actionGet().getState().nodes().getMasterNodeId(), nullValue());
             fail("should not be able to find master");
diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java
index 229862afa9b..f2cd88a27a7 100644
--- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java
@@ -29,7 +29,6 @@ import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.ClusterStateUpdateTask;
 import org.elasticsearch.cluster.LocalClusterUpdateTask;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
@@ -366,7 +365,7 @@ public class ClusterStateHealthTests extends ESTestCase {
         final Set<String> nodeIds = new HashSet<>();
         final int numNodes = randomIntBetween(numberOfReplicas + 1, 10);
         for (int i = 0; i < numNodes; i++) {
-            nodeIds.add(randomAsciiOfLength(8));
+            nodeIds.add(randomAlphaOfLength(8));
         }
 
         final List<ClusterState> clusterStates = new ArrayList<>();
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
index e47ca2184f8..4f235e52636 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
@@ -51,7 +51,7 @@ public class DateMathExpressionResolverTests extends ESTestCase {
         int numIndexExpressions = randomIntBetween(1, 9);
         List<String> indexExpressions = new ArrayList<>(numIndexExpressions);
         for (int i = 0; i < numIndexExpressions; i++) {
-            indexExpressions.add(randomAsciiOfLength(10));
+            indexExpressions.add(randomAlphaOfLength(10));
         }
         List<String> result = expressionResolver.resolve(context, indexExpressions);
         assertThat(result.size(), equalTo(indexExpressions.size()));
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
index ea5c55d9a0d..9178d112aca 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
@@ -49,7 +49,7 @@ public class IndexGraveyardTests extends ESTestCase {
         final IndexGraveyard graveyard = createRandom();
         assertThat(graveyard, equalTo(IndexGraveyard.builder(graveyard).build()));
         final IndexGraveyard.Builder newGraveyard = IndexGraveyard.builder(graveyard);
-        newGraveyard.addTombstone(new Index(randomAsciiOfLengthBetween(4, 15), UUIDs.randomBase64UUID()));
+        newGraveyard.addTombstone(new Index(randomAlphaOfLengthBetween(4, 15), UUIDs.randomBase64UUID()));
         assertThat(newGraveyard.build(), not(graveyard));
     }
 
@@ -140,7 +140,7 @@ public class IndexGraveyardTests extends ESTestCase {
         for (final Index index : indices) {
             assertTrue(indexGraveyard.containsIndex(index));
         }
-        assertFalse(indexGraveyard.containsIndex(new Index(randomAsciiOfLength(6), UUIDs.randomBase64UUID())));
+        assertFalse(indexGraveyard.containsIndex(new Index(randomAlphaOfLength(6), UUIDs.randomBase64UUID())));
     }
 
     public static IndexGraveyard createRandom() {
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
index 50fe3c88b65..387b66d031b 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
@@ -152,7 +152,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
     }
 
     public void testShrinkIndexSettings() {
-        String indexName = randomAsciiOfLength(10);
+        String indexName = randomAlphaOfLength(10);
         List<Version> versions = Arrays.asList(VersionUtils.randomVersion(random()), VersionUtils.randomVersion(random()),
             VersionUtils.randomVersion(random()));
         versions.sort((l, r) -> Long.compare(l.id, r.id));
@@ -191,8 +191,6 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
         assertEquals("1", builder.build().get("index.allocation.max_retries"));
         assertEquals(version, builder.build().getAsVersion("index.version.created", null));
         assertEquals(upgraded, builder.build().getAsVersion("index.version.upgraded", null));
-        assertEquals(minCompat.luceneVersion.toString(), builder.build().get("index.version.minimum_compatible", null));
-
     }
 
     private DiscoveryNode newNode(String nodeId) {
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java
index 0ad36713810..d65a35b8c26 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexServiceTests.java
@@ -55,7 +55,7 @@ public class MetaDataDeleteIndexServiceTests extends ESTestCase {
     }
 
     public void testDeleteSnapshotting() {
-        String index = randomAsciiOfLength(5);
+        String index = randomAlphaOfLength(5);
         Snapshot snapshot = new Snapshot("doesn't matter", new SnapshotId("snapshot name", "snapshot uuid"));
         SnapshotsInProgress snaps = new SnapshotsInProgress(new SnapshotsInProgress.Entry(snapshot, true, false,
             SnapshotsInProgress.State.INIT, singletonList(new IndexId(index, "doesn't matter")),
@@ -71,7 +71,7 @@ public class MetaDataDeleteIndexServiceTests extends ESTestCase {
 
     public void testDeleteUnassigned() {
         // Create an unassigned index
-        String index = randomAsciiOfLength(5);
+        String index = randomAlphaOfLength(5);
         ClusterState before = clusterState(index);
 
         // Mock the built reroute
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java
index 3b894eb8d7c..e5b52d8cf52 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java
@@ -60,7 +60,7 @@ public class MetaDataIndexAliasesServiceTests extends ESTestCase {
 
     public void testAddAndRemove() {
         // Create a state with a single index
-        String index = randomAsciiOfLength(5);
+        String index = randomAlphaOfLength(5);
         ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index);
 
         // Add an alias to it
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
index 88e2835b5ab..24e969d06d6 100644
--- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java
@@ -86,8 +86,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
         final IndexMetaData metaData = newIndexMeta("foo", Settings.builder()
             .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1)
             .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("2.4.0"))
-            .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE,
-                Version.CURRENT.luceneVersion.toString()).build());
+            .build());
         String message = expectThrows(IllegalStateException.class, () -> service.upgradeIndexMetaData(metaData,
             Version.CURRENT.minimumIndexCompatibilityVersion())).getMessage();
         assertEquals(message, "The index [[foo/BOOM]] was created with version [2.4.0] but the minimum compatible version is [5.0.0]." +
@@ -96,8 +95,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
         IndexMetaData goodMeta = newIndexMeta("foo", Settings.builder()
             .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1)
             .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("5.1.0"))
-            .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE,
-                Version.CURRENT.luceneVersion.toString()).build());
+            .build());
         service.upgradeIndexMetaData(goodMeta, Version.V_5_0_0.minimumIndexCompatibilityVersion());
     }
 
@@ -113,5 +111,4 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
         IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build();
         return metaData;
     }
-
 }
diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java
index 4ad1c5fdd08..9200e04c712 100644
--- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.cluster.node;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.elasticsearch.Version;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.VersionUtils;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -172,7 +171,7 @@ public class DiscoveryNodesTests extends ESTestCase {
         for (int i = 0; i < numNodes; i++) {
             Map<String, String> attributes = new HashMap<>();
             if (frequently()) {
-                attributes.put("custom", randomBoolean() ? "match" : randomAsciiOfLengthBetween(3, 5));
+                attributes.put("custom", randomBoolean() ? "match" : randomAlphaOfLengthBetween(3, 5));
             }
             final DiscoveryNode node = newNode(idGenerator.getAndIncrement(), attributes,
                 new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))));
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java
index c24a86176dc..d8cc4c09386 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java
@@ -58,7 +58,7 @@ public class OperationRoutingTests extends ESTestCase{
             assertEquals(shardSplits[1], (shardSplits[1] / shardSplits[2]) * shardSplits[2]);
             IndexMetaData metaData = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(shardSplits[0])
                 .numberOfReplicas(1).build();
-            String term = randomAsciiOfLength(10);
+            String term = randomAlphaOfLength(10);
             final int shard = OperationRouting.generateShardId(metaData, term, null);
             IndexMetaData shrunk = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(shardSplits[1])
                 .numberOfReplicas(1)
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java
index 69773e99921..932d24f2da1 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/RandomShardRoutingMutator.java
@@ -21,7 +21,7 @@ package org.elasticsearch.cluster.routing;
 
 import java.util.Set;
 
-import static org.elasticsearch.test.ESTestCase.randomAsciiOfLength;
+import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength;
 import static org.elasticsearch.test.ESTestCase.randomFrom;
 import static org.elasticsearch.test.ESTestCase.randomInt;
 
@@ -37,9 +37,9 @@ public final class RandomShardRoutingMutator {
         switch (randomInt(2)) {
             case 0:
                 if (shardRouting.unassigned() == false && shardRouting.primary() == false) {
-                    shardRouting = shardRouting.moveToUnassigned(new UnassignedInfo(randomReason(), randomAsciiOfLength(10)));
+                    shardRouting = shardRouting.moveToUnassigned(new UnassignedInfo(randomReason(), randomAlphaOfLength(10)));
                 } else if (shardRouting.unassignedInfo() != null) {
-                    shardRouting = shardRouting.updateUnassigned(new UnassignedInfo(randomReason(), randomAsciiOfLength(10)),
+                    shardRouting = shardRouting.updateUnassigned(new UnassignedInfo(randomReason(), randomAlphaOfLength(10)),
                         shardRouting.recoverySource());
                 }
                 break;
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
index a4c8d0e4247..0b0f6ae273d 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java
@@ -118,7 +118,7 @@ public class ShardRoutingTests extends ESTestCase {
             switch (changeId) {
                 case 0:
                     // change index
-                    ShardId shardId = new ShardId(new Index("blubb", randomAsciiOfLength(10)), otherRouting.id());
+                    ShardId shardId = new ShardId(new Index("blubb", randomAlphaOfLength(10)), otherRouting.id());
                     otherRouting = new ShardRouting(shardId, otherRouting.currentNodeId(), otherRouting.relocatingNodeId(),
                         otherRouting.primary(), otherRouting.state(), otherRouting.recoverySource(), otherRouting.unassignedInfo(),
                         otherRouting.allocationId(),
                        otherRouting.getExpectedShardSize());
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
index 4fffcebc79b..73ff7544ae2 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
@@ -79,9 +79,9 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
     public void testSerialization() throws Exception {
         UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values());
         UnassignedInfo meta = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ?
-            new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null, null, randomIntBetween(1, 100), System.nanoTime(),
+            new UnassignedInfo(reason, randomBoolean() ? randomAlphaOfLength(4) : null, null, randomIntBetween(1, 100), System.nanoTime(),
                 System.currentTimeMillis(), false, AllocationStatus.NO_ATTEMPT):
-            new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null);
+            new UnassignedInfo(reason, randomBoolean() ? randomAlphaOfLength(4) : null);
         BytesStreamOutput out = new BytesStreamOutput();
         meta.writeTo(out);
         out.close();
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java
index 0f194a0f912..3fcd743a8a1 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecisionTests.java
@@ -174,7 +174,7 @@ public class AllocateUnassignedDecisionTests extends ESTestCase {
             randomFrom(Decision.NO, Decision.THROTTLE, Decision.YES), 1));
         AllocateUnassignedDecision decision;
         if (finalDecision == Decision.Type.YES) {
-            decision = AllocateUnassignedDecision.yes(assignedNode, randomBoolean() ? randomAsciiOfLength(5) : null,
+            decision = AllocateUnassignedDecision.yes(assignedNode, randomBoolean() ? randomAlphaOfLength(5) : null,
                 nodeDecisions, randomBoolean());
         } else {
             decision = AllocateUnassignedDecision.no(randomFrom(
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java
index cb69a5de87f..a63447e845b 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java
@@ -191,7 +191,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase {
         int excludeNodesSize = randomIntBetween(0, numAddedNodes - 1);
         final Set<String> excludeNodes = new HashSet<>();
         for (int i = 0; i < numAddedNodes; i++) {
-            DiscoveryNode discoveryNode = newNode(randomAsciiOfLength(7));
+            DiscoveryNode discoveryNode = newNode(randomAlphaOfLength(7));
             nodesBuilder.add(discoveryNode);
             if (i < excludeNodesSize) {
                 excludeNodes.add(discoveryNode.getId());
@@ -331,7 +331,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase {
     private ClusterState addNodesToClusterState(ClusterState clusterState, int numNodesToAdd) {
         DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(clusterState.nodes());
         for (int i = 0; i < numNodesToAdd; i++) {
-            DiscoveryNode discoveryNode = newNode(randomAsciiOfLength(7));
+            DiscoveryNode discoveryNode = newNode(randomAlphaOfLength(7));
             nodesBuilder.add(discoveryNode);
         }
         return ClusterState.builder(clusterState).nodes(nodesBuilder).build();
@@ -352,7 +352,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase {
         ClusterState clusterState = ClusterStateCreationUtils.state("idx", 2, numShards);
         // add a new node so shards can be rebalanced there
         DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(clusterState.nodes());
-        nodesBuilder.add(newNode(randomAsciiOfLength(7)));
+        nodesBuilder.add(newNode(randomAlphaOfLength(7)));
        clusterState = ClusterState.builder(clusterState).nodes(nodesBuilder).build();
         ShardRouting shard = clusterState.routingTable().index("idx").shard(0).primaryShard();
         RoutingAllocation routingAllocation = newRoutingAllocation(
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResultTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResultTests.java
index 0461528fe06..4b846c849f0 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResultTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResultTests.java
@@ -61,7 +61,7 @@ public class NodeAllocationResultTests extends ESTestCase {
         assertFalse(explanation.getShardStoreInfo().isInSync());
         assertFalse(explanation.getShardStoreInfo().hasMatchingSyncId());
 
-        String allocId = randomAsciiOfLength(5);
+        String allocId = randomAlphaOfLength(5);
         boolean inSync = randomBoolean();
         shardStoreInfo = new ShardStoreInfo(allocId, inSync, randomBoolean() ? new Exception("bad stuff") : null);
         explanation = new NodeAllocationResult(node, shardStoreInfo, decision);
diff --git a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
index c2f27beb04e..71fe7b262a6 100644
--- a/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
+++ b/core/src/test/java/org/elasticsearch/common/cache/CacheTests.java
@@ -712,7 +712,7 @@ public class CacheTests extends ESTestCase {
 
         CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads);
 
-        final String key = randomAsciiOfLengthBetween(2, 32);
+        final String key = randomAlphaOfLengthBetween(2, 32);
         for (int i = 0; i < numberOfThreads; i++) {
             Thread thread = new Thread(() -> {
                 try {
diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
index 4a02a1f345b..a8b065343c9 100644
--- a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
@@ -339,8 +339,8 @@ public class BytesStreamsTests extends ESTestCase {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
                 new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new)));
-            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
-                randomAsciiOfLengthBetween(1, 10));
+            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10),
+                randomAlphaOfLengthBetween(1, 10));
             out.writeNamedWriteable(namedWriteableIn);
             byte[] bytes = BytesReference.toBytes(out.bytes());
@@ -360,7 +360,7 @@ public class BytesStreamsTests extends ESTestCase {
         int size = between(0, 100);
         List<BaseNamedWriteable> expected = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
-            expected.add(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
+            expected.add(new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
@@ -386,8 +386,8 @@ public class BytesStreamsTests extends ESTestCase {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
                 new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null)));
-            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
-                randomAsciiOfLengthBetween(1, 10));
+            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10),
+                randomAlphaOfLengthBetween(1, 10));
             out.writeNamedWriteable(namedWriteableIn);
             byte[] bytes = BytesReference.toBytes(out.bytes());
             try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
@@ -400,7 +400,7 @@ public class BytesStreamsTests extends ESTestCase {
 
     public void testOptionalWriteableReaderReturnsNull() throws IOException {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
-            out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
+            out.writeOptionalWriteable(new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
             StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
             IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null));
             assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
@@ -417,8 +417,8 @@ public class BytesStreamsTests extends ESTestCase {
                     return "intentionally-broken";
                 }
             })));
-            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
-                randomAsciiOfLengthBetween(1, 10));
+            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10),
+                randomAlphaOfLengthBetween(1, 10));
             out.writeNamedWriteable(namedWriteableIn);
             byte[] bytes = BytesReference.toBytes(out.bytes());
             try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
@@ -461,7 +461,7 @@ public class BytesStreamsTests extends ESTestCase {
         final int size = randomIntBetween(0, 100);
         final Map<String, String> expected = new HashMap<>(randomIntBetween(0, 100));
         for (int i = 0; i < size; ++i) {
-            expected.put(randomAsciiOfLength(2), randomAsciiOfLength(5));
+            expected.put(randomAlphaOfLength(2), randomAlphaOfLength(5));
         }
 
         final BytesStreamOutput out = new BytesStreamOutput();
@@ -482,10 +482,10 @@ public class BytesStreamsTests extends ESTestCase {
             List<String> list = new ArrayList<>(listSize);
 
             for (int j = 0; j < listSize; ++j) {
-                list.add(randomAsciiOfLength(5));
+                list.add(randomAlphaOfLength(5));
             }
 
-            expected.put(randomAsciiOfLength(2), list);
+            expected.put(randomAlphaOfLength(2), list);
         }
 
         final BytesStreamOutput out = new BytesStreamOutput();
@@ -633,8 +633,8 @@ public class BytesStreamsTests extends ESTestCase {
     public void testWriteMapWithConsistentOrder() throws IOException {
         Map<String, String> map =
             randomMap(new TreeMap<>(), randomIntBetween(2, 20),
-                () -> randomAsciiOfLength(5),
-                () -> randomAsciiOfLength(5));
+                () -> randomAlphaOfLength(5),
+                () -> randomAlphaOfLength(5));
 
         Map<String, String> reverseMap = new TreeMap<>(Collections.reverseOrder());
         reverseMap.putAll(map);
@@ -655,8 +655,8 @@ public class BytesStreamsTests extends ESTestCase {
     public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException {
         Map<String, String> streamOutMap =
             randomMap(new HashMap<>(), randomIntBetween(2, 20),
-                () -> randomAsciiOfLength(5),
-                () -> randomAsciiOfLength(5));
+                () -> randomAlphaOfLength(5),
+                () -> randomAlphaOfLength(5));
         try (BytesStreamOutput streamOut = new BytesStreamOutput()) {
             streamOut.writeMapWithConsistentOrder(streamOutMap);
             StreamInput in = StreamInput.wrap(BytesReference.toBytes(streamOut.bytes()));
diff --git a/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java
index f957752c718..3f2274321a2 100644
--- a/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java
+++ b/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java
@@ -58,7 +58,7 @@ public class DeprecationLoggerTests extends ESTestCase {
         try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) {
             final Set<ThreadContext> threadContexts = Collections.singleton(threadContext);
 
-            final String param = randomAsciiOfLengthBetween(1, 5);
+            final String param = randomAlphaOfLengthBetween(1, 5);
             logger.deprecated(threadContexts, "A simple message [{}]", param);
 
             final Map<String, List<String>> responseHeaders = threadContext.getResponseHeaders();
@@ -75,9 +75,9 @@ public class DeprecationLoggerTests extends ESTestCase {
         try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) {
             final Set<ThreadContext> threadContexts = Collections.singleton(threadContext);
 
-            final String param = randomAsciiOfLengthBetween(1, 5);
+            final String param = randomAlphaOfLengthBetween(1, 5);
             logger.deprecated(threadContexts, "A simple message [{}]", param);
-            final String second = randomAsciiOfLengthBetween(1, 10);
+            final String second = randomAlphaOfLengthBetween(1, 10);
             logger.deprecated(threadContexts, second);
 
             final Map<String, List<String>> responseHeaders = threadContext.getResponseHeaders();
@@ -167,7 +167,7 @@ public class DeprecationLoggerTests extends ESTestCase {
     }
 
     public void testWarningValueFromWarningHeader() throws InterruptedException {
-        final String s = randomAsciiOfLength(16);
+        final String s = randomAlphaOfLength(16);
         final String first = DeprecationLogger.formatWarning(s);
         assertThat(DeprecationLogger.extractWarningValueFromWarningHeader(first), equalTo(s));
     }
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
index d9fe806e53b..7c2a2f52b60 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java
@@ -88,7 +88,7 @@ public class FreqTermsEnumTests extends ESTestCase {
         iw = new IndexWriter(dir, conf);
         terms = new String[scaledRandomIntBetween(10, 300)];
         for (int i = 0; i < terms.length; i++) {
-            terms[i] = randomAsciiOfLength(5);
+            terms[i] = randomAlphaOfLength(5);
         }
 
         int numberOfDocs = scaledRandomIntBetween(30, 300);
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 7ec8f41034f..76905d43799 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -35,7 +35,6 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.IllegalFormatCodePointException;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
@@ -45,6 +44,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.BiConsumer;
 import java.util.function.Function;
 
+import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.startsWith;
 
@@ -252,13 +252,32 @@ public class ScopedSettingsTests extends ESTestCase {
             new ClusterSettings(Settings.EMPTY,
                 new HashSet<>(Arrays.asList(Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope),
                     Setting.intSetting("foo.bar.baz", 1, Property.NodeScope))));
-        assertFalse(settings.hasDynamicSetting("foo.bar.baz"));
-        assertTrue(settings.hasDynamicSetting("foo.bar"));
+        assertFalse(settings.isDynamicSetting("foo.bar.baz"));
+        assertTrue(settings.isDynamicSetting("foo.bar"));
         assertNotNull(settings.get("foo.bar.baz"));
         settings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
-        assertTrue(settings.hasDynamicSetting("transport.tracer.include." + randomIntBetween(1, 100)));
-        assertFalse(settings.hasDynamicSetting("transport.tracer.include.BOOM"));
-        assertTrue(settings.hasDynamicSetting("cluster.routing.allocation.require.value"));
+        assertTrue(settings.isDynamicSetting("transport.tracer.include." + randomIntBetween(1, 100)));
+        assertFalse(settings.isDynamicSetting("transport.tracer.include.BOOM"));
+        assertTrue(settings.isDynamicSetting("cluster.routing.allocation.require.value"));
+    }
+
+    public void testIsFinal() {
+        ClusterSettings settings =
+            new ClusterSettings(Settings.EMPTY,
+                new HashSet<>(Arrays.asList(Setting.intSetting("foo.int", 1, Property.Final, Property.NodeScope),
+                    Setting.groupSetting("foo.group.", Property.Final, Property.NodeScope),
+                    Setting.groupSetting("foo.list.", Property.Final, Property.NodeScope),
+                    Setting.intSetting("foo.int.baz", 1, Property.NodeScope))));
+
+        assertFalse(settings.isFinalSetting("foo.int.baz"));
+        assertTrue(settings.isFinalSetting("foo.int"));
+
+        assertFalse(settings.isFinalSetting("foo.list"));
+        assertTrue(settings.isFinalSetting("foo.list.0.key"));
+        assertTrue(settings.isFinalSetting("foo.list.key"));
+
+        assertFalse(settings.isFinalSetting("foo.group"));
+        assertTrue(settings.isFinalSetting("foo.group.key"));
     }
 
     public void testDiff() throws IOException {
@@ -581,4 +600,45 @@ public class ScopedSettingsTests extends ESTestCase {
             }
         }
     }
+
+    public void testUpdateNumberOfShardsFail() {
+        IndexScopedSettings settings = new IndexScopedSettings(Settings.EMPTY,
+            IndexScopedSettings.BUILT_IN_INDEX_SETTINGS);
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
+            () -> settings.updateSettings(Settings.builder().put("index.number_of_shards", 8).build(),
+                Settings.builder(), Settings.builder(), "index"));
+        assertThat(ex.getMessage(),
+            containsString("final index setting [index.number_of_shards], not updateable"));
+    }
+
+    public void testFinalSettingUpdateFail() {
+        Setting<Integer> finalSetting = Setting.intSetting("some.final.setting", 1, Property.Final, Property.NodeScope);
+        Setting<Settings> finalGroupSetting = Setting.groupSetting("some.final.group.", Property.Final, Property.NodeScope);
+        Settings currentSettings = Settings.builder()
+            .put("some.final.setting", 9)
+            .put("some.final.group.foo", 7)
+            .build();
+        ClusterSettings service = new ClusterSettings(currentSettings
+            , new HashSet<>(Arrays.asList(finalSetting, finalGroupSetting)));
+
+        IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () ->
+            service.updateDynamicSettings(Settings.builder().put("some.final.setting", 8).build(),
+                Settings.builder().put(currentSettings), Settings.builder(), "node"));
+        assertThat(exc.getMessage(), containsString("final node setting [some.final.setting]"));
+
+        exc = expectThrows(IllegalArgumentException.class, () ->
+            service.updateDynamicSettings(Settings.builder().putNull("some.final.setting").build(),
+                Settings.builder().put(currentSettings), Settings.builder(), "node"));
+        assertThat(exc.getMessage(), containsString("final node setting [some.final.setting]"));
+
+        exc = expectThrows(IllegalArgumentException.class, () ->
+            service.updateSettings(Settings.builder().put("some.final.group.new", 8).build(),
+                Settings.builder().put(currentSettings), Settings.builder(), "node"));
+        assertThat(exc.getMessage(), containsString("final node setting [some.final.group.new]"));
+
+        exc = expectThrows(IllegalArgumentException.class, () ->
+            service.updateSettings(Settings.builder().put("some.final.group.foo", 5).build(),
+                Settings.builder().put(currentSettings), Settings.builder(), "node"));
+        assertThat(exc.getMessage(), containsString("final node setting [some.final.group.foo]"));
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SecureStringTests.java b/core/src/test/java/org/elasticsearch/common/settings/SecureStringTests.java
index 4f9ed8ed4b9..50bdab1d4f9 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SecureStringTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SecureStringTests.java
@@ -30,7 +30,7 @@ import static org.hamcrest.Matchers.sameInstance;
 public class SecureStringTests extends ESTestCase {
 
     public void testCloseableCharsDoesNotModifySecureString() {
-        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        final char[] password = randomAlphaOfLengthBetween(1, 32).toCharArray();
         SecureString secureString = new SecureString(password);
         assertSecureStringEqualToChars(password, secureString);
         try (SecureString copy = secureString.clone()) {
@@ -41,7 +41,7 @@ public class SecureStringTests extends ESTestCase {
     }
 
     public void testClosingSecureStringDoesNotModifyCloseableChars() {
-        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        final char[] password = randomAlphaOfLengthBetween(1, 32).toCharArray();
         SecureString secureString = new SecureString(password);
         assertSecureStringEqualToChars(password, secureString);
         SecureString copy = secureString.clone();
@@ -55,7 +55,7 @@ public class SecureStringTests extends ESTestCase {
     }
 
     public void testClosingChars() {
-        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        final char[] password = randomAlphaOfLengthBetween(1, 32).toCharArray();
         SecureString secureString = new SecureString(password);
         assertSecureStringEqualToChars(password, secureString);
         SecureString copy = secureString.clone();
@@ -71,7 +71,7 @@ public class SecureStringTests extends ESTestCase {
     }
 
     public void testGetCloseableCharsAfterSecureStringClosed() {
-        final char[] password = randomAsciiOfLengthBetween(1, 32).toCharArray();
+        final char[] password = randomAlphaOfLengthBetween(1, 32).toCharArray();
         SecureString secureString = new SecureString(password);
         assertSecureStringEqualToChars(password, secureString);
         secureString.close();
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
index 24b48cbf368..0bb1abb37ad 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
@@ -552,12 +552,15 @@ public class SettingTests extends ESTestCase {
      * We can't have Null properties
      */
    public void testRejectNullProperties() {
-        try {
-            Setting.simpleString("foo.bar", (Property[]) null);
-            fail();
-        } catch (IllegalArgumentException ex) {
-            assertThat(ex.getMessage(), containsString("properties cannot be null for setting"));
-        }
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
+            () -> Setting.simpleString("foo.bar", (Property[]) null));
+        assertThat(ex.getMessage(), containsString("properties cannot be null for setting"));
+    }
+
+    public void testRejectConflictProperties() {
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
+            () -> Setting.simpleString("foo.bar", Property.Final, Property.Dynamic));
+        assertThat(ex.getMessage(), containsString("final setting [foo.bar] cannot be dynamic"));
     }
 
     public void testTimeValue() {
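The Property.Final tests above encode a simple contract: a final setting can be supplied in the node's initial configuration but rejects any later update (including unsetting it), and combining Final with Dynamic fails at construction time. A small sketch of that contract; the setting name is illustrative:

    // A final, node-scoped integer setting; fine at startup:
    Setting<Integer> mySetting =
        Setting.intSetting("my.final.setting", 1, Property.Final, Property.NodeScope);

    // Any runtime update through the settings service is rejected with
    // IllegalArgumentException("final node setting [my.final.setting]").

    // Declaring it dynamic as well never constructs:
    // Setting.intSetting("my.final.setting", 1, Property.Final, Property.Dynamic)
    //     -> IllegalArgumentException("final setting [my.final.setting] cannot be dynamic")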
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 5deb774c2df..f747a20e468 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -69,7 +69,7 @@ public class SettingsTests extends ESTestCase { } public void testReplacePropertiesPlaceholderByEnvironmentVariables() { - final String hostname = randomAsciiOfLength(16); + final String hostname = randomAlphaOfLength(16); final Settings implicitEnvSettings = Settings.builder() .put("setting1", "${HOSTNAME}") .replacePropertyPlaceholders(name -> "HOSTNAME".equals(name) ? hostname : null) diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index cbfd98aa3b7..02394df3867 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -138,7 +138,7 @@ public class TimeValueTests extends ESTestCase { private static final String FRACTIONAL_TIME_VALUES_ARE_NOT_SUPPORTED = "fractional time values are not supported"; public void testNonFractionalTimeValues() { - final String s = randomAsciiOfLength(10) + randomTimeUnit(); + final String s = randomAlphaOfLength(10) + randomTimeUnit(); final ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> TimeValue.parseTimeValue(s, null, "test")); assertThat(e, hasToString(containsString("failed to parse [" + s + "]"))); diff --git a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java index 9f90fcf151d..78abed0b320 100644 --- a/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/BytesRefHashTests.java @@ -63,7 +63,7 @@ public class BytesRefHashTests extends ESSingleNodeTestCase { final int len = randomIntBetween(1, 100000); final BytesRef[] values = new BytesRef[len]; for (int i = 0; i < values.length; ++i) { - values[i] = new BytesRef(randomAsciiOfLength(5)); + values[i] = new BytesRef(randomAlphaOfLength(5)); } final ObjectLongMap valueToId = new ObjectLongHashMap<>(); final BytesRef[] idToValue = new BytesRef[values.length]; diff --git a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java index 581d58d896d..bf10c117b13 100644 --- a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java @@ -71,7 +71,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { - final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); + final Index index = new Index(randomAlphaOfLength(10), UUIDs.randomBase64UUID()); Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) @@ -100,7 +100,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { 
.put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { - final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); + final Index index = new Index(randomAlphaOfLength(10), UUIDs.randomBase64UUID()); Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) @@ -139,7 +139,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { final Settings nodeSettings = Settings.builder() .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { - final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); + final Index index = new Index(randomAlphaOfLength(10), UUIDs.randomBase64UUID()); Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) @@ -164,7 +164,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { Map> indexSettingsMap = new HashMap<>(); for (int i = 0; i < randomIntBetween(2, 5); i++) { - final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); + final Index index = new Index(randomAlphaOfLength(10), UUIDs.randomBase64UUID()); Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index 698e216ea16..bee56c229c0 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -430,11 +430,16 @@ public class ThreadContextTests extends ESTestCase { // create a abstract runnable, add headers and transient objects and verify in the methods try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { threadContext.putHeader("foo", "bar"); + boolean systemContext = randomBoolean(); + if (systemContext) { + threadContext.markAsSystemContext(); + } threadContext.putTransient("foo", "bar_transient"); withContext = threadContext.preserveContext(new AbstractRunnable() { @Override public void onAfter() { + assertEquals(systemContext, threadContext.isSystemContext()); assertEquals("bar", threadContext.getHeader("foo")); assertEquals("bar_transient", threadContext.getTransient("foo")); assertNotNull(threadContext.getTransient("failure")); @@ -445,6 +450,7 @@ public class ThreadContextTests extends ESTestCase { @Override public void onFailure(Exception e) { + assertEquals(systemContext, threadContext.isSystemContext()); assertEquals("exception from doRun", e.getMessage()); assertEquals("bar", threadContext.getHeader("foo")); assertEquals("bar_transient", threadContext.getTransient("foo")); @@ -454,6 +460,7 @@ public class ThreadContextTests extends ESTestCase { @Override protected void doRun() throws Exception { + assertEquals(systemContext, threadContext.isSystemContext()); assertEquals("bar", threadContext.getHeader("foo")); assertEquals("bar_transient", threadContext.getTransient("foo")); assertFalse(threadContext.isDefaultContext()); @@ -594,6 +601,18 @@ public class 
ThreadContextTests extends ESTestCase { } } + public void testMarkAsSystemContext() throws IOException { + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + assertFalse(threadContext.isSystemContext()); + try(ThreadContext.StoredContext context = threadContext.stashContext()){ + assertFalse(threadContext.isSystemContext()); + threadContext.markAsSystemContext(); + assertTrue(threadContext.isSystemContext()); + } + assertFalse(threadContext.isSystemContext()); + } + } + /** * Sometimes wraps a Runnable in an AbstractRunnable. */ diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java index 0e11e630ea9..32ffb33b694 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java @@ -66,12 +66,12 @@ public class ConstructingObjectParserTests extends ESTestCase { * Builds the object in random order and parses it. */ public void testRandomOrder() throws Exception { - HasCtorArguments expected = new HasCtorArguments(randomAsciiOfLength(5), randomInt()); + HasCtorArguments expected = new HasCtorArguments(randomAlphaOfLength(5), randomInt()); expected.setMineral(randomInt()); expected.setFruit(randomInt()); - expected.setA(randomBoolean() ? null : randomAsciiOfLength(5)); - expected.setB(randomBoolean() ? null : randomAsciiOfLength(5)); - expected.setC(randomBoolean() ? null : randomAsciiOfLength(5)); + expected.setA(randomBoolean() ? null : randomAlphaOfLength(5)); + expected.setB(randomBoolean() ? null : randomAlphaOfLength(5)); + expected.setC(randomBoolean() ? null : randomAlphaOfLength(5)); expected.setD(randomBoolean()); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); expected.toXContent(builder, ToXContent.EMPTY_PARAMS); diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java index 5ed3b0cf8c2..4fcc16416b5 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/UnknownNamedObjectExceptionTests.java @@ -31,7 +31,7 @@ public class UnknownNamedObjectExceptionTests extends ESTestCase { public void testRoundTrip() throws IOException { XContentLocation location = new XContentLocation(between(1, 1000), between(1, 1000)); UnknownNamedObjectException created = new UnknownNamedObjectException(location, UnknownNamedObjectExceptionTests.class, - randomAsciiOfLength(5)); + randomAlphaOfLength(5)); UnknownNamedObjectException roundTripped; try (BytesStreamOutput out = new BytesStreamOutput()) { @@ -50,7 +50,7 @@ public class UnknownNamedObjectExceptionTests extends ESTestCase { public void testStatusCode() { XContentLocation location = new XContentLocation(between(1, 1000), between(1, 1000)); UnknownNamedObjectException e = new UnknownNamedObjectException(location, UnknownNamedObjectExceptionTests.class, - randomAsciiOfLength(5)); + randomAlphaOfLength(5)); assertEquals(RestStatus.BAD_REQUEST, e.status()); } } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java index 4eec46d9b27..e151ac67a33 100644 --- 
a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathTests.java @@ -202,7 +202,7 @@ public class FilterPathTests extends ESTestCase { assertThat(filterPath.isSimpleWildcard(), is(true)); assertThat(filterPath.getSegment(), equalTo("*")); - FilterPath next = filterPath.matchProperty(randomAsciiOfLength(2)); + FilterPath next = filterPath.matchProperty(randomAlphaOfLength(2)); assertNotNull(next); assertSame(next, FilterPath.EMPTY); } @@ -246,7 +246,7 @@ public class FilterPathTests extends ESTestCase { assertThat(filterPath.isDoubleWildcard(), is(true)); assertThat(filterPath.getSegment(), equalTo("**")); - FilterPath next = filterPath.matchProperty(randomAsciiOfLength(2)); + FilterPath next = filterPath.matchProperty(randomAlphaOfLength(2)); assertNotNull(next); assertSame(next, FilterPath.EMPTY); } @@ -263,7 +263,7 @@ public class FilterPathTests extends ESTestCase { assertThat(filterPath.matches(), is(false)); assertThat(filterPath.getSegment(), equalTo("**")); - FilterPath next = filterPath.matchProperty(randomAsciiOfLength(2)); + FilterPath next = filterPath.matchProperty(randomAlphaOfLength(2)); assertNotNull(next); assertThat(next.matches(), is(false)); assertThat(next.getSegment(), equalTo("bar")); diff --git a/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java b/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java index 9ed433918d1..b99dd8e8353 100644 --- a/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java +++ b/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java @@ -314,6 +314,36 @@ public class SimpleJodaTests extends ESTestCase { } } + public void testForInvalidTimeZoneWithEpochSeconds() { + DateTimeFormatter dateTimeFormatter = new DateTimeFormatterBuilder() + .append(new Joda.EpochTimeParser(false)) + .toFormatter() + .withZone(DateTimeZone.forOffsetHours(1)); + FormatDateTimeFormatter formatter = + new FormatDateTimeFormatter("epoch_seconds", dateTimeFormatter, Locale.ROOT); + try { + formatter.parser().parseDateTime("1433144433655"); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("time_zone must be UTC")); + } + } + + public void testForInvalidTimeZoneWithEpochMillis() { + DateTimeFormatter dateTimeFormatter = new DateTimeFormatterBuilder() + .append(new Joda.EpochTimeParser(true)) + .toFormatter() + .withZone(DateTimeZone.forOffsetHours(1)); + FormatDateTimeFormatter formatter = + new FormatDateTimeFormatter("epoch_millis", dateTimeFormatter, Locale.ROOT); + try { + formatter.parser().parseDateTime("1433144433"); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("time_zone must be UTC")); + } + } + public void testThatEpochParserIsPrinter() { FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis"); assertThat(formatter.parser().isPrinter(), is(true)); diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index 837c74a3c61..59fe5872911 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -234,7 +234,7 @@ public class ZenFaultDetectionTests extends ESTestCase { public void 
testMasterFaultDetectionConnectOnDisconnect() throws InterruptedException { Settings.Builder settings = Settings.builder(); boolean shouldRetry = randomBoolean(); - ClusterName clusterName = new ClusterName(randomAsciiOfLengthBetween(3, 20)); + ClusterName clusterName = new ClusterName(randomAlphaOfLengthBetween(3, 20)); // make sure we don't ping settings.put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry) @@ -272,7 +272,7 @@ public class ZenFaultDetectionTests extends ESTestCase { public void testMasterFaultDetectionNotSizeLimited() throws InterruptedException { boolean shouldRetry = randomBoolean(); - ClusterName clusterName = new ClusterName(randomAsciiOfLengthBetween(3, 20)); + ClusterName clusterName = new ClusterName(randomAlphaOfLengthBetween(3, 20)); final Settings settings = Settings.builder() .put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry) .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "1s") diff --git a/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java new file mode 100644 index 00000000000..25641e16fca --- /dev/null +++ b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -0,0 +1,178 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.discovery.single; + +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.discovery.zen.PingContextProvider; +import org.elasticsearch.discovery.zen.UnicastHostsProvider; +import org.elasticsearch.discovery.zen.UnicastZenPing; +import org.elasticsearch.discovery.zen.ZenPing; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.NodeConfigurationSource; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.transport.MockTcpTransportPlugin; +import org.elasticsearch.transport.TransportService; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Collections; +import java.util.Stack; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CountDownLatch; +import java.util.function.Function; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +@ESIntegTestCase.ClusterScope( + scope = ESIntegTestCase.Scope.TEST, + numDataNodes = 1, + numClientNodes = 0, + supportsDedicatedMasters = false, + autoMinMasterNodes = false) +public class SingleNodeDiscoveryIT extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings + .builder() + .put(super.nodeSettings(nodeOrdinal)) + .put("discovery.type", "single-node") + // TODO: do not use such a restrictive ephemeral port range + .put("transport.tcp.port", "49152-49156") + .build(); + } + + public void testDoesNotRespondToZenPings() throws Exception { + final Settings settings = + Settings.builder().put("cluster.name", internalCluster().getClusterName()).build(); + final Version version = Version.CURRENT; + final Stack<Closeable> closeables = new Stack<>(); + final TestThreadPool threadPool = new TestThreadPool(getClass().getName()); + try { + final MockTransportService pingTransport = + MockTransportService.createNewService(settings, version, threadPool, null); + pingTransport.start(); + closeables.push(pingTransport); + final TransportService nodeTransport = + internalCluster().getInstance(TransportService.class); + // try to ping the single node directly + final UnicastHostsProvider provider = + () -> Collections.singletonList(nodeTransport.getLocalNode()); + final CountDownLatch latch = new CountDownLatch(1); + final UnicastZenPing unicastZenPing = + new UnicastZenPing(settings, threadPool, pingTransport, provider) { + @Override + protected void finishPingingRound(PingingRound pingingRound) { + latch.countDown(); + super.finishPingingRound(pingingRound); + } + }; + final DiscoveryNodes nodes = + DiscoveryNodes.builder().add(pingTransport.getLocalNode()).build(); + final ClusterName clusterName = new ClusterName(internalCluster().getClusterName()); + final ClusterState state = ClusterState.builder(clusterName).nodes(nodes).build(); + unicastZenPing.start(new PingContextProvider() { + @Override + public ClusterState clusterState() { + return state; + } + + @Override + public DiscoveryNodes nodes() { + return DiscoveryNodes + .builder() + .add(nodeTransport.getLocalNode()) +
.add(pingTransport.getLocalNode()) + .localNodeId(pingTransport.getLocalNode().getId()) + .build(); + } + }); + closeables.push(unicastZenPing); + final CompletableFuture<ZenPing.PingCollection> responses = new CompletableFuture<>(); + unicastZenPing.ping(responses::complete, TimeValue.timeValueSeconds(3)); + latch.await(); + responses.get(); + assertThat(responses.get().size(), equalTo(0)); + } finally { + while (!closeables.isEmpty()) { + IOUtils.closeWhileHandlingException(closeables.pop()); + } + terminate(threadPool); + } + } + + public void testSingleNodesDoNotDiscoverEachOther() throws IOException, InterruptedException { + final NodeConfigurationSource configurationSource = new NodeConfigurationSource() { + @Override + public Settings nodeSettings(int nodeOrdinal) { + return Settings + .builder() + .put("discovery.type", "single-node") + .put("http.enabled", false) + .put("transport.type", "mock-socket-network") + /* + * We align the port ranges of the two as then with zen discovery these two + * nodes would find each other. + */ + // TODO: do not use such a restrictive ephemeral port range + .put("transport.tcp.port", "49152-49156") + .build(); + } + }; + try (InternalTestCluster other = + new InternalTestCluster( + randomLong(), + createTempDir(), + false, + false, + 1, + 1, + internalCluster().getClusterName(), + configurationSource, + 0, + false, + "other", + Collections.singletonList(MockTcpTransportPlugin.class), + Function.identity())) { + other.beforeTest(random(), 0); + final ClusterState first = internalCluster().getInstance(ClusterService.class).state(); + final ClusterState second = other.getInstance(ClusterService.class).state(); + assertThat(first.nodes().getSize(), equalTo(1)); + assertThat(second.nodes().getSize(), equalTo(1)); + assertThat( + first.nodes().getMasterNodeId(), + not(equalTo(second.nodes().getMasterNodeId()))); + assertThat( + first.metaData().clusterUUID(), + not(equalTo(second.metaData().clusterUUID()))); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java new file mode 100644 index 00000000000..a0e0b699d78 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryTests.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.discovery.single; + +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.Closeable; +import java.util.Stack; +import java.util.concurrent.CountDownLatch; + +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.hamcrest.Matchers.equalTo; + +public class SingleNodeDiscoveryTests extends ESTestCase { + + public void testInitialJoin() throws Exception { + final Settings settings = Settings.EMPTY; + final Version version = Version.CURRENT; + final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + final Stack<Closeable> stack = new Stack<>(); + try { + final MockTransportService transportService = + MockTransportService.createNewService(settings, version, threadPool, null); + stack.push(transportService); + transportService.start(); + final DiscoveryNode node = transportService.getLocalNode(); + final ClusterService clusterService = createClusterService(threadPool, node); + stack.push(clusterService); + final SingleNodeDiscovery discovery = + new SingleNodeDiscovery(Settings.EMPTY, clusterService); + discovery.startInitialJoin(); + + // we are racing against the initial join which is asynchronous so we use an observer + final ClusterState state = clusterService.state(); + final ThreadContext threadContext = threadPool.getThreadContext(); + final ClusterStateObserver observer = + new ClusterStateObserver(state, clusterService, null, logger, threadContext); + if (state.nodes().getMasterNodeId() == null) { + final CountDownLatch latch = new CountDownLatch(1); + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + latch.countDown(); + } + + @Override + public void onClusterServiceClose() { + latch.countDown(); + } + + @Override + public void onTimeout(TimeValue timeout) { + assert false; + } + }, s -> s.nodes().getMasterNodeId() != null); + + latch.await(); + } + + final DiscoveryNodes nodes = clusterService.state().nodes(); + assertThat(nodes.getSize(), equalTo(1)); + assertThat(nodes.getMasterNode().getId(), equalTo(node.getId())); + } finally { + while (!stack.isEmpty()) { + IOUtils.closeWhileHandlingException(stack.pop()); + } + terminate(threadPool); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/MembershipActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/MembershipActionTests.java new file mode 100644 index 00000000000..b8d9f175e64 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/discovery/zen/MembershipActionTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership.
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.discovery.zen; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; + +public class MembershipActionTests extends ESTestCase { + + public void testPreventJoinClusterWithNewerIndices() { + Settings.builder().build(); + MetaData.Builder metaBuilder = MetaData.builder(); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1).build(); + metaBuilder.put(indexMetaData, false); + MetaData metaData = metaBuilder.build(); + MembershipAction.ensureIndexCompatibility(Version.CURRENT, metaData); + + expectThrows(IllegalStateException.class, () -> + MembershipAction.ensureIndexCompatibility(VersionUtils.getPreviousVersion(Version.CURRENT), + metaData)); + } + + public void testPreventJoinClusterWithUnsupportedIndices() { + Settings.builder().build(); + MetaData.Builder metaBuilder = MetaData.builder(); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings(VersionUtils.getPreviousVersion(Version.CURRENT + .minimumIndexCompatibilityVersion()))) + .numberOfShards(1) + .numberOfReplicas(1).build(); + metaBuilder.put(indexMetaData, false); + MetaData metaData = metaBuilder.build(); + expectThrows(IllegalStateException.class, () -> + MembershipAction.ensureIndexCompatibility(Version.CURRENT, + metaData)); + } + + public void testSuccess() { + Settings.builder().build(); + MetaData.Builder metaBuilder = MetaData.builder(); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings(VersionUtils.randomVersionBetween(random(), + Version.CURRENT.minimumIndexCompatibilityVersion(), Version.CURRENT))) + .numberOfShards(1) + .numberOfReplicas(1).build(); + metaBuilder.put(indexMetaData, false); + indexMetaData = IndexMetaData.builder("test1") + .settings(settings(VersionUtils.randomVersionBetween(random(), + Version.CURRENT.minimumIndexCompatibilityVersion(), Version.CURRENT))) + .numberOfShards(1) + .numberOfReplicas(1).build(); + metaBuilder.put(indexMetaData, false); + MetaData metaData = metaBuilder.build(); + MembershipAction.ensureIndexCompatibility(Version.CURRENT, + metaData); + } +} diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java index 2cf623b702a..3e90f414760 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java @@ -613,7 +613,7 @@ public class PublishClusterStateActionTests extends ESTestCase { logger.info("--> testing acceptances of any 
master when having no master"); ClusterState state = ClusterState.builder(node.clusterState) - .nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId(randomAsciiOfLength(10))).incrementVersion().build(); + .nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId(randomAlphaOfLength(10))).incrementVersion().build(); node.action.validateIncomingState(state, null); // now set a master node @@ -634,7 +634,7 @@ public class PublishClusterStateActionTests extends ESTestCase { logger.info("--> testing rejection of another cluster name"); try { - node.action.validateIncomingState(ClusterState.builder(new ClusterName(randomAsciiOfLength(10))) + node.action.validateIncomingState(ClusterState.builder(new ClusterName(randomAlphaOfLength(10))) .nodes(node.nodes()).build(), node.clusterState); fail("node accepted state with another cluster name"); } catch (IllegalStateException OK) { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 2beac7eef2f..5df6bd214f3 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -504,7 +504,7 @@ public class UnicastZenPingTests extends ESTestCase { public void testUnknownHost() throws InterruptedException { final Logger logger = mock(Logger.class); final NetworkService networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); - final String hostname = randomAsciiOfLength(8); + final String hostname = randomAlphaOfLength(8); final UnknownHostException unknownHostException = new UnknownHostException(hostname); final Transport transport = new MockTcpTransport( Settings.EMPTY, diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java index d514cce2223..a396f4bfa6b 100644 --- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java +++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java @@ -649,7 +649,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { } else { return new FieldStats.Text(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomBoolean(), randomBoolean(), - new BytesRef(randomAsciiOfLength(10)), new BytesRef(randomAsciiOfLength(20))); + new BytesRef(randomAlphaOfLength(10)), new BytesRef(randomAlphaOfLength(20))); } case 4: if (withNullMinMax && randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index bbf95795337..7b5530bba10 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.plugins.MetaDataUpgrader; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.test.TestCustomMetaData; -import org.junit.Before; import java.util.Arrays; import java.util.Collections; @@ -438,7 +437,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { } for (int i = 0; i < randomIntBetween(1, 5); i++) { builder.put( - IndexMetaData.builder(randomAsciiOfLength(10)) + IndexMetaData.builder(randomAlphaOfLength(10)) .settings(settings(Version.CURRENT)) 
.numberOfReplicas(randomIntBetween(0, 3)) .numberOfShards(randomIntBetween(1, 5)) diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 5cb6b2d9f98..1b443a33da5 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -327,12 +327,12 @@ public class MetaDataStateFormatTests extends ESTestCase { MetaData.Builder mdBuilder = MetaData.builder(); mdBuilder.generateClusterUuidIfNeeded(); for (int i = 0; i < numIndices; i++) { - mdBuilder.put(indexBuilder(randomAsciiOfLength(10) + "idx-"+i)); + mdBuilder.put(indexBuilder(randomAlphaOfLength(10) + "idx-"+i)); } int numDelIndices = randomIntBetween(0, 5); final IndexGraveyard.Builder graveyard = IndexGraveyard.builder(); for (int i = 0; i < numDelIndices; i++) { - graveyard.addTombstone(new Index(randomAsciiOfLength(10) + "del-idx-" + i, UUIDs.randomBase64UUID())); + graveyard.addTombstone(new Index(randomAlphaOfLength(10) + "del-idx-" + i, UUIDs.randomBase64UUID())); } mdBuilder.indexGraveyard(graveyard.build()); return mdBuilder.build(); diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index e618d44db5c..ca330d9d9df 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -176,8 +176,8 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { * select the second node as target */ public void testShardLockObtainFailedExceptionPreferOtherValidCopies() { - String allocId1 = randomAsciiOfLength(10); - String allocId2 = randomAsciiOfLength(10); + String allocId1 = randomAlphaOfLength(10); + String allocId2 = randomAlphaOfLength(10); final RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), CLUSTER_RECOVERED, allocId1, allocId2);; testAllocator.addData(node1, allocId1, randomBoolean(), diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index a32d076272b..bc3ee4b5f06 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -370,6 +370,27 @@ public class IndexSettingsTests extends ESTestCase { assertEquals(actualNewTranslogFlushThresholdSize, settings.getFlushThresholdSize()); } + public void testTranslogGenerationSizeThreshold() { + final ByteSizeValue size = new ByteSizeValue(Math.abs(randomInt())); + final String key = IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(); + final ByteSizeValue actualValue = + ByteSizeValue.parseBytesSizeValue(size.toString(), key); + final IndexMetaData metaData = + newIndexMeta( + "index", + Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(key, size.toString()) + .build()); + final IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(actualValue, settings.getGenerationThresholdSize()); + final ByteSizeValue newSize = new ByteSizeValue(Math.abs(randomInt())); + final ByteSizeValue actual = ByteSizeValue.parseBytesSizeValue(newSize.toString(), key); + settings.updateIndexMetaData( + newIndexMeta("index", 
Settings.builder().put(key, newSize.toString()).build())); + assertEquals(actual, settings.getGenerationThresholdSize()); + } + public void testArchiveBrokenIndexSettings() { Settings settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings( diff --git a/core/src/test/java/org/elasticsearch/index/IndexTests.java b/core/src/test/java/org/elasticsearch/index/IndexTests.java index d55630b1357..c39a43e8490 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexTests.java @@ -50,7 +50,7 @@ public class IndexTests extends ESTestCase { } public void testXContent() throws IOException { - final String name = randomAsciiOfLengthBetween(4, 15); + final String name = randomAlphaOfLengthBetween(4, 15); final String uuid = UUIDs.randomBase64UUID(); final Index original = new Index(name, uuid); final XContentBuilder builder = JsonXContent.contentBuilder(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeywordMarkerFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeywordMarkerFilterFactoryTests.java new file mode 100644 index 00000000000..3298537af97 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/analysis/KeywordMarkerFilterFactoryTests.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.miscellaneous.PatternKeywordMarkerFilter; +import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESTestCase.TestAnalysis; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.instanceOf; + +/** + * Tests for the {@link KeywordMarkerTokenFilterFactory} class. + */ +public class KeywordMarkerFilterFactoryTests extends ESTokenStreamTestCase { + + /** + * Tests using a keyword set for the keyword marker filter. 
+ */ + public void testKeywordSet() throws IOException { + Settings settings = Settings.builder() + .put("index.analysis.filter.my_keyword.type", "keyword_marker") + .put("index.analysis.filter.my_keyword.keywords", "running, sleeping") + .put("index.analysis.analyzer.my_keyword.type", "custom") + .put("index.analysis.analyzer.my_keyword.tokenizer", "standard") + .put("index.analysis.analyzer.my_keyword.filter", "my_keyword, porter_stem") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_keyword"); + assertThat(tokenFilter, instanceOf(KeywordMarkerTokenFilterFactory.class)); + TokenStream filter = tokenFilter.create(new WhitespaceTokenizer()); + assertThat(filter, instanceOf(SetKeywordMarkerFilter.class)); + NamedAnalyzer analyzer = analysis.indexAnalyzers.get("my_keyword"); + // jogging is not part of the keywords set, so verify that its the only stemmed word + assertAnalyzesTo(analyzer, "running jogging sleeping", + new String[] { "running", "jog", "sleeping" }); + } + + /** + * Tests using a regular expression pattern for the keyword marker filter. + */ + public void testKeywordPattern() throws IOException { + Settings settings = Settings.builder() + .put("index.analysis.filter.my_keyword.type", "keyword_marker") + .put("index.analysis.filter.my_keyword.keywords_pattern", "run[a-z]ing") + .put("index.analysis.analyzer.my_keyword.type", "custom") + .put("index.analysis.analyzer.my_keyword.tokenizer", "standard") + .put("index.analysis.analyzer.my_keyword.filter", "my_keyword, porter_stem") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_keyword"); + assertThat(tokenFilter, instanceOf(KeywordMarkerTokenFilterFactory.class)); + TokenStream filter = tokenFilter.create(new WhitespaceTokenizer()); + assertThat(filter, instanceOf(PatternKeywordMarkerFilter.class)); + NamedAnalyzer analyzer = analysis.indexAnalyzers.get("my_keyword"); + // running should match the pattern, so it should not be stemmed but sleeping should + assertAnalyzesTo(analyzer, "running sleeping", new String[] { "running", "sleep" }); + } + + /** + * Verifies that both keywords and patterns cannot be specified together. 
+ */ + public void testCannotSpecifyBothKeywordsAndPattern() throws IOException { + Settings settings = Settings.builder() + .put("index.analysis.filter.my_keyword.type", "keyword_marker") + .put("index.analysis.filter.my_keyword.keywords", "running") + .put("index.analysis.filter.my_keyword.keywords_pattern", "run[a-z]ing") + .put("index.analysis.analyzer.my_keyword.type", "custom") + .put("index.analysis.analyzer.my_keyword.tokenizer", "standard") + .put("index.analysis.analyzer.my_keyword.filter", "my_keyword, porter_stem") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings)); + assertEquals("cannot specify both `keywords_pattern` and `keywords` or `keywords_path`", + e.getMessage()); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 6c7d2dd810f..65bebc40933 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -3248,7 +3248,7 @@ public class InternalEngineTests extends ESTestCase { }; noOpEngine = createEngine(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null, () -> seqNoService); final long primaryTerm = randomNonNegativeLong(); - final String reason = randomAsciiOfLength(16); + final String reason = randomAlphaOfLength(16); noOpEngine.noOp( new Engine.NoOp( null, diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index 75f53b8a7e3..c96292d90e5 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -109,9 +109,9 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes IndexSearcher searcher = new IndexSearcher(readerContext.reader()); TopFieldDocs topDocs; - + SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); topDocs = searcher.search(new MatchAllDocsQuery(), 10, - new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); + new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[0]).fields[0]), equalTo(one())); @@ -120,8 +120,9 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes assertThat(topDocs.scoreDocs[2].doc, equalTo(2)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[2]).fields[0]), equalTo(three())); + sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, - new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MAX, null), true))); + new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -182,14 +183,16 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes assertValues(bytesValues, 2, three()); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); - 
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); + SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); + TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(3)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); - assertThat(topDocs.scoreDocs[2].doc, equalTo(2)); - - topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MAX, null), true))); + assertThat(topDocs.scoreDocs[2].doc, equalTo(2)) + ; + sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); + topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(3)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); @@ -245,8 +248,10 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); + SortField sortField = + indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, - new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MIN, null)))); + new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(8)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(7)); @@ -266,8 +271,9 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes assertThat(topDocs.scoreDocs[7].doc, equalTo(5)); assertThat((BytesRef) ((FieldDoc) topDocs.scoreDocs[7]).fields[0], equalTo(null)); + sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, - new Sort(new SortField("value", indexFieldData.comparatorSource(null, MultiValueMode.MAX, null), true))); + new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(8)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(6)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index b3652ec9167..c2416278878 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -264,8 +264,8 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI final IndexFieldData indexFieldData = getForField("value"); final String missingValue = values[1]; IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); - XFieldComparatorSource comparator = indexFieldData.comparatorSource(missingValue, MultiValueMode.MIN, null); - TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? 
numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse))); + SortField sortField = indexFieldData.sortField(missingValue, MultiValueMode.MIN, null, reverse); + TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField)); assertEquals(numDocs, topDocs.totalHits); BytesRef previousValue = reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { @@ -318,8 +318,8 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI } final IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); - XFieldComparatorSource comparator = indexFieldData.comparatorSource(first ? "_first" : "_last", MultiValueMode.MIN, null); - TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(new SortField("value", comparator, reverse))); + SortField sortField = indexFieldData.sortField(first ? "_first" : "_last", MultiValueMode.MIN, null, reverse); + TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField)); assertEquals(numDocs, topDocs.totalHits); BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java index 9fdede24d5c..33170eb39ec 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.SortField; +import org.elasticsearch.common.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; @@ -53,8 +55,9 @@ public class NoOrdinalsStringFieldDataTests extends PagedBytesStringFieldDataTes } @Override - public XFieldComparatorSource comparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) { - return new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested); + public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { + XFieldComparatorSource source = new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested); + return new SortField(getFieldName(), source, reverse); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index b200a42b3f8..f426e5433c6 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -172,9 +172,8 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { public void testSorting() throws Exception { IndexFieldData indexFieldData = getForField(parentType); IndexSearcher searcher = 
new IndexSearcher(DirectoryReader.open(writer)); - IndexFieldData.XFieldComparatorSource comparator = indexFieldData.comparatorSource("_last", MultiValueMode.MIN, null); - - TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, false))); + SortField sortField = indexFieldData.sortField("_last", MultiValueMode.MIN, null, false); + TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(8)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); @@ -194,7 +193,8 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { assertThat(topDocs.scoreDocs[7].doc, equalTo(7)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[7]).fields[0]), equalTo(null)); - topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, true))); + sortField = indexFieldData.sortField("_last", MultiValueMode.MIN, null, true); + topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); assertThat(topDocs.totalHits, equalTo(8)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); diff --git a/core/src/test/java/org/elasticsearch/index/get/GetFieldTests.java b/core/src/test/java/org/elasticsearch/index/get/GetFieldTests.java index 3fd6ca87e9b..d2968b6d8ae 100644 --- a/core/src/test/java/org/elasticsearch/index/get/GetFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/get/GetFieldTests.java @@ -89,10 +89,10 @@ public class GetFieldTests extends ESTestCase { public static Tuple randomGetField(XContentType xContentType) { if (randomBoolean()) { String fieldName = randomFrom(ParentFieldMapper.NAME, RoutingFieldMapper.NAME, UidFieldMapper.NAME); - GetField getField = new GetField(fieldName, Collections.singletonList(randomAsciiOfLengthBetween(3, 10))); + GetField getField = new GetField(fieldName, Collections.singletonList(randomAlphaOfLengthBetween(3, 10))); return Tuple.tuple(getField, getField); } - String fieldName = randomAsciiOfLengthBetween(3, 10); + String fieldName = randomAlphaOfLengthBetween(3, 10); Tuple, List> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType); GetField input = new GetField(fieldName, tuple.v1()); GetField expected = new GetField(fieldName, tuple.v2()); diff --git a/core/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/core/src/test/java/org/elasticsearch/index/get/GetResultTests.java index 4e5b94d9c98..93ba260ee4d 100644 --- a/core/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/core/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -173,9 +173,9 @@ public class GetResultTests extends ESTestCase { } public static Tuple randomGetResult(XContentType xContentType) { - final String index = randomAsciiOfLengthBetween(3, 10); - final String type = randomAsciiOfLengthBetween(3, 10); - final String id = randomAsciiOfLengthBetween(3, 10); + final String index = randomAlphaOfLengthBetween(3, 10); + final String type = randomAlphaOfLengthBetween(3, 10); + final String id = randomAlphaOfLengthBetween(3, 10); final long version; final boolean exists; BytesReference source = null; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index e66e0532737..f1a2e97f0bf 
100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -30,15 +30,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.RootObjectMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.TokenCountFieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -174,7 +165,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase { public void testMultiFieldsInConsistentOrder() throws Exception { String[] multiFieldNames = new String[randomIntBetween(2, 10)]; for (int i = 0; i < multiFieldNames.length; i++) { - multiFieldNames[i] = randomAsciiOfLength(4); + multiFieldNames[i] = randomAlphaOfLength(4); } XContentBuilder builder = jsonBuilder().startObject().startObject("type").startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index f70db120fcf..b3d7db23c38 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -19,10 +19,10 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.document.DoubleRangeField; -import org.apache.lucene.document.FloatRangeField; -import org.apache.lucene.document.IntRangeField; -import org.apache.lucene.document.LongRangeField; +import org.apache.lucene.document.DoubleRange; +import org.apache.lucene.document.FloatRange; +import org.apache.lucene.document.IntRange; +import org.apache.lucene.document.LongRange; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; import org.elasticsearch.Version; @@ -73,7 +73,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase { public void testRangeQuery() throws Exception { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings); QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis); RangeFieldMapper.RangeFieldType ft = new RangeFieldMapper.RangeFieldType(type); @@ -113,44 +113,44 @@ public class RangeFieldTypeTests extends FieldTypeTestCase { int[] lower = new int[] {from + (includeLower ? 0 : 1)}; int[] upper = new int[] {to - (includeUpper ? 
0 : 1)}; if (relation == ShapeRelation.WITHIN) { - return IntRangeField.newWithinQuery(FIELDNAME, lower, upper); + return IntRange.newWithinQuery(FIELDNAME, lower, upper); } else if (relation == ShapeRelation.CONTAINS) { - return IntRangeField.newContainsQuery(FIELDNAME, lower, upper); + return IntRange.newContainsQuery(FIELDNAME, lower, upper); } - return IntRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + return IntRange.newIntersectsQuery(FIELDNAME, lower, upper); } private Query getLongRangeQuery(ShapeRelation relation, long from, long to, boolean includeLower, boolean includeUpper) { long[] lower = new long[] {from + (includeLower ? 0 : 1)}; long[] upper = new long[] {to - (includeUpper ? 0 : 1)}; if (relation == ShapeRelation.WITHIN) { - return LongRangeField.newWithinQuery(FIELDNAME, lower, upper); + return LongRange.newWithinQuery(FIELDNAME, lower, upper); } else if (relation == ShapeRelation.CONTAINS) { - return LongRangeField.newContainsQuery(FIELDNAME, lower, upper); + return LongRange.newContainsQuery(FIELDNAME, lower, upper); } - return LongRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + return LongRange.newIntersectsQuery(FIELDNAME, lower, upper); } private Query getFloatRangeQuery(ShapeRelation relation, float from, float to, boolean includeLower, boolean includeUpper) { float[] lower = new float[] {includeLower ? from : Math.nextUp(from)}; float[] upper = new float[] {includeUpper ? to : Math.nextDown(to)}; if (relation == ShapeRelation.WITHIN) { - return FloatRangeField.newWithinQuery(FIELDNAME, lower, upper); + return FloatRange.newWithinQuery(FIELDNAME, lower, upper); } else if (relation == ShapeRelation.CONTAINS) { - return FloatRangeField.newContainsQuery(FIELDNAME, lower, upper); + return FloatRange.newContainsQuery(FIELDNAME, lower, upper); } - return FloatRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + return FloatRange.newIntersectsQuery(FIELDNAME, lower, upper); } private Query getDoubleRangeQuery(ShapeRelation relation, double from, double to, boolean includeLower, boolean includeUpper) { double[] lower = new double[] {includeLower ? from : Math.nextUp(from)}; double[] upper = new double[] {includeUpper ? 
to : Math.nextDown(to)}; if (relation == ShapeRelation.WITHIN) { - return DoubleRangeField.newWithinQuery(FIELDNAME, lower, upper); + return DoubleRange.newWithinQuery(FIELDNAME, lower, upper); } else if (relation == ShapeRelation.CONTAINS) { - return DoubleRangeField.newContainsQuery(FIELDNAME, lower, upper); + return DoubleRange.newContainsQuery(FIELDNAME, lower, upper); } - return DoubleRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + return DoubleRange.newIntersectsQuery(FIELDNAME, lower, upper); } private Object nextFrom() { diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java index 60ba7aa3f7b..1f58e785219 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java @@ -30,7 +30,7 @@ public abstract class AbstractTermQueryTestCase createQueryBuilder(null, term)); assertEquals("field name is null or empty", e.getMessage()); e = expectThrows(IllegalArgumentException.class, () -> createQueryBuilder("", term)); diff --git a/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java index a7245826a2d..66c36e0418a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -44,13 +44,13 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { Map alternateVersions = new HashMap<>(); - CommonTermsQueryBuilder commonTermsQuery = new CommonTermsQueryBuilder(randomAsciiOfLengthBetween(1, 10), - randomAsciiOfLengthBetween(1, 10)); + CommonTermsQueryBuilder commonTermsQuery = new CommonTermsQueryBuilder(randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10)); String contentString = "{\n" + " \"common\" : {\n" + " \"" + commonTermsQuery.fieldName() + "\" : \"" + commonTermsQuery.value() + "\"\n" + diff --git a/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java index 95720e5e325..cfc2d789420 100644 --- a/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/ExistsQueryBuilderTests.java @@ -43,7 +43,7 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { Map alternateVersions = new HashMap<>(); - FuzzyQueryBuilder fuzzyQuery = new FuzzyQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); + FuzzyQueryBuilder fuzzyQuery = new FuzzyQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); String contentString = "{\n" + " \"fuzzy\" : {\n" + " \"" + fuzzyQuery.fieldName() + "\" : \"" + fuzzyQuery.value() + "\"\n" + @@ -190,4 +190,17 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase parseQuery(shortJson)); assertEquals("[fuzzy] query doesn't support multiple fields, found [message1] and [message2]", e.getMessage()); } + + public void testParseFailsWithValueArray() { + String query = "{\n" + + " \"fuzzy\" : {\n" + + " \"message1\" : {\n" + + " \"value\" : [ \"one\", \"two\", \"three\"]\n" + + " }\n" + + " }\n" + + "}"; + + ParsingException e = 
expectThrows(ParsingException.class, () -> parseQuery(query)); + assertEquals("[fuzzy] unexpected token [START_ARRAY] after [value]", e.getMessage()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index 6ce95892769..ddc4810b871 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -70,15 +70,15 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase if (frequently()) { types[i] = randomFrom(getCurrentTypes()); } else { - types[i] = randomAsciiOfLengthBetween(1, 10); + types[i] = randomAlphaOfLengthBetween(1, 10); } } } else { @@ -58,7 +58,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase int numberOfIds = randomIntBetween(0, 10); String[] ids = new String[numberOfIds]; for (int i = 0; i < numberOfIds; i++) { - ids[i] = randomAsciiOfLengthBetween(1, 10); + ids[i] = randomAlphaOfLengthBetween(1, 10); } IdsQueryBuilder query; if (types.length > 0 || randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index b46f3f3aa19..99deef9cde7 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -297,16 +297,16 @@ public class InnerHitBuilderTests extends ESTestCase { public static InnerHitBuilder randomInnerHits(boolean recursive, boolean includeQueryTypeOrPath) { InnerHitBuilder innerHits = new InnerHitBuilder(); - innerHits.setName(randomAsciiOfLengthBetween(1, 16)); + innerHits.setName(randomAlphaOfLengthBetween(1, 16)); innerHits.setFrom(randomIntBetween(0, 128)); innerHits.setSize(randomIntBetween(0, 128)); innerHits.setExplain(randomBoolean()); innerHits.setVersion(randomBoolean()); innerHits.setTrackScores(randomBoolean()); if (randomBoolean()) { - innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); + innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16))); } - innerHits.setDocValueFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); + innerHits.setDocValueFields(randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16))); // Random script fields deduped on their field name. 
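The dominant change in the hunks above and below is the rename of the randomAsciiOf* test helpers to randomAlphaOf*. The old name was misleading: the helpers only ever produced ASCII letters, not the full ASCII range, so the rename documents the actual behavior without changing any test semantics. A minimal sketch of a generator with that behavior, assuming a uniform draw over upper- and lower-case ASCII letters (RandomAlphaSketch and its main method are illustrative, not the test framework's implementation):

import java.util.Random;

public final class RandomAlphaSketch {
    private static final Random RANDOM = new Random();

    // Approximates randomAlphaOfLengthBetween(min, max): a string of ASCII
    // letters whose length is drawn uniformly from [min, max], inclusive.
    static String randomAlphaOfLengthBetween(int min, int max) {
        int length = min + RANDOM.nextInt(max - min + 1);
        StringBuilder sb = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            char base = RANDOM.nextBoolean() ? 'a' : 'A';
            sb.append((char) (base + RANDOM.nextInt(26)));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(randomAlphaOfLengthBetween(3, 10)); // letters only, e.g. "qXbTz"
    }
}

Because only the helper names change, the before and after sides of each of these hunks produce identically distributed values.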
Map scriptFields = new HashMap<>(); for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) { @@ -325,7 +325,7 @@ public class InnerHitBuilderTests extends ESTestCase { innerHits.setFetchSourceContext(randomFetchSourceContext); if (randomBoolean()) { innerHits.setSorts(randomListStuff(16, - () -> SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))) + () -> SortBuilders.fieldSort(randomAlphaOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))) ); } innerHits.setHighlightBuilder(HighlightBuilderTests.randomHighlighterBuilder()); @@ -337,11 +337,11 @@ public class InnerHitBuilderTests extends ESTestCase { } if (includeQueryTypeOrPath) { - QueryBuilder query = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 16), randomAsciiOfLengthBetween(1, 16)); + QueryBuilder query = new MatchQueryBuilder(randomAlphaOfLengthBetween(1, 16), randomAlphaOfLengthBetween(1, 16)); if (randomBoolean()) { - return new InnerHitBuilder(innerHits, randomAsciiOfLength(8), query, randomBoolean()); + return new InnerHitBuilder(innerHits, randomAlphaOfLength(8), query, randomBoolean()); } else { - return new InnerHitBuilder(innerHits, query, randomAsciiOfLength(8), randomBoolean()); + return new InnerHitBuilder(innerHits, query, randomAlphaOfLength(8), randomBoolean()); } } else { return innerHits; @@ -366,14 +366,14 @@ public class InnerHitBuilderTests extends ESTestCase { modifiers.add(() -> copy.setExplain(!copy.isExplain())); modifiers.add(() -> copy.setVersion(!copy.isVersion())); modifiers.add(() -> copy.setTrackScores(!copy.isTrackScores())); - modifiers.add(() -> copy.setName(randomValueOtherThan(copy.getName(), () -> randomAsciiOfLengthBetween(1, 16)))); + modifiers.add(() -> copy.setName(randomValueOtherThan(copy.getName(), () -> randomAlphaOfLengthBetween(1, 16)))); modifiers.add(() -> { if (randomBoolean()) { copy.setDocValueFields(randomValueOtherThan(copy.getDocValueFields(), () -> { - return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)); + return randomListStuff(16, () -> randomAlphaOfLengthBetween(1, 16)); })); } else { - copy.addDocValueField(randomAsciiOfLengthBetween(1, 16)); + copy.addDocValueField(randomAlphaOfLengthBetween(1, 16)); } }); modifiers.add(() -> { @@ -400,12 +400,12 @@ public class InnerHitBuilderTests extends ESTestCase { if (randomBoolean()) { final List> sortBuilders = randomValueOtherThan(copy.getSorts(), () -> { List> builders = randomListStuff(16, - () -> SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))); + () -> SortBuilders.fieldSort(randomAlphaOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))); return builders; }); copy.setSorts(sortBuilders); } else { - copy.addSort(SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20))); + copy.addSort(SortBuilders.fieldSort(randomAlphaOfLengthBetween(5, 20))); } }); modifiers.add(() -> copy @@ -415,10 +415,10 @@ public class InnerHitBuilderTests extends ESTestCase { List previous = copy.getStoredFieldsContext() == null ? 
Collections.emptyList() : copy.getStoredFieldsContext().fieldNames(); List newValues = randomValueOtherThan(previous, - () -> randomListStuff(1, 16, () -> randomAsciiOfLengthBetween(1, 16))); + () -> randomListStuff(1, 16, () -> randomAlphaOfLengthBetween(1, 16))); copy.setStoredFieldNames(newValues); } else { - copy.getStoredFieldsContext().addFieldName(randomAsciiOfLengthBetween(1, 16)); + copy.getStoredFieldsContext().addFieldName(randomAlphaOfLengthBetween(1, 16)); } }); randomFrom(modifiers).run(); @@ -431,12 +431,12 @@ public class InnerHitBuilderTests extends ESTestCase { if (randomBoolean()) { int numEntries = randomIntBetween(0, 32); for (int i = 0; i < numEntries; i++) { - randomMap.put(String.valueOf(i), randomAsciiOfLength(16)); + randomMap.put(String.valueOf(i), randomAlphaOfLength(16)); } } - Script script = new Script(randomScriptType, randomScriptType == ScriptType.STORED ? null : randomAsciiOfLengthBetween(1, 4), - randomAsciiOfLength(128), randomMap); - return new SearchSourceBuilder.ScriptField(randomAsciiOfLengthBetween(1, 32), script, randomBoolean()); + Script script = new Script(randomScriptType, randomScriptType == ScriptType.STORED ? null : randomAlphaOfLengthBetween(1, 4), + randomAlphaOfLength(128), randomMap); + return new SearchSourceBuilder.ScriptField(randomAlphaOfLengthBetween(1, 32), script, randomBoolean()); } static List randomListStuff(int maxSize, Supplier valueSupplier) { diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java index 0d654940e09..b4d5f98fe0b 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java @@ -52,7 +52,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { Map alternateVersions = new HashMap<>(); - MatchPhrasePrefixQueryBuilder matchPhrasePrefixQuery = new MatchPhrasePrefixQueryBuilder(randomAsciiOfLengthBetween(1, 10), - randomAsciiOfLengthBetween(1, 10)); + MatchPhrasePrefixQueryBuilder matchPhrasePrefixQuery = new MatchPhrasePrefixQueryBuilder(randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10)); String contentString = "{\n" + " \"match_phrase_prefix\" : {\n" + " \"" + matchPhrasePrefixQuery.fieldName() + "\" : \"" + matchPhrasePrefixQuery.value() + "\"\n" + diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java index 2b295dd1141..d37d6eeb88f 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchPhraseQueryBuilderTests.java @@ -52,7 +52,7 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { Map alternateVersions = new HashMap<>(); - MatchPhraseQueryBuilder matchPhraseQuery = new MatchPhraseQueryBuilder(randomAsciiOfLengthBetween(1, 10), - randomAsciiOfLengthBetween(1, 10)); + MatchPhraseQueryBuilder matchPhraseQuery = new MatchPhraseQueryBuilder(randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10)); String contentString = "{\n" + " \"match_phrase\" : {\n" + " \"" + matchPhraseQuery.fieldName() + "\" : \"" + matchPhraseQuery.value() + "\"\n" + diff --git 
a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 08c0f47feb1..40ebb0e4a15 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -70,7 +70,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { Map alternateVersions = new HashMap<>(); - MatchQueryBuilder matchQuery = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); + MatchQueryBuilder matchQuery = new MatchQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); String contentString = "{\n" + " \"match\" : {\n" + " \"" + matchQuery.fieldName() + "\" : \"" + matchQuery.value() + "\"\n" + diff --git a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index ee1265eca65..f2d66409e9e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -96,7 +96,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase fields = new HashMap<>(); for (int i = 0; i < fieldCount; i++) { if (randomBoolean()) { - fields.put(randomAsciiOfLengthBetween(1, 10), AbstractQueryBuilder.DEFAULT_BOOST); + fields.put(randomAlphaOfLengthBetween(1, 10), AbstractQueryBuilder.DEFAULT_BOOST); } else { - fields.put(randomBoolean() ? STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10), 2.0f / randomIntBetween(1, 20)); + fields.put(randomBoolean() ? STRING_FIELD_NAME : randomAlphaOfLengthBetween(1, 10), 2.0f / randomIntBetween(1, 20)); } } result.fields(fields); @@ -197,7 +197,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase { @@ -68,7 +66,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase indexFieldData, String missingValue, MultiValueMode sortMode, int n, boolean reverse) throws IOException { Query parentFilter = new TermQuery(new Term("__type", "parent")); Query childFilter = new TermQuery(new Term("__type", "child")); - XFieldComparatorSource nestedComparatorSource = indexFieldData.comparatorSource(missingValue, sortMode, createNested(searcher, parentFilter, childFilter)); + SortField sortField = indexFieldData.sortField(missingValue, sortMode, createNested(searcher, parentFilter, childFilter), reverse); Query query = new ConstantScoreQuery(parentFilter); - Sort sort = new Sort(new SortField("f", nestedComparatorSource, reverse)); + Sort sort = new Sort(sortField); return searcher.search(query, n, sort); } diff --git a/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTests.java b/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTests.java index 7d6dd25403b..58f66dc62e7 100644 --- a/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTests.java +++ b/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTests.java @@ -115,7 +115,7 @@ public class GlobalCheckpointTests extends ESTestCase { allocations.keySet().forEach(aId -> tracker.updateLocalCheckpoint(aId, allocations.get(aId))); // now insert an unknown active/insync id , the checkpoint shouldn't change but a refresh should be requested. 
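Alongside the renames, the field-data hunks earlier in this section (ParentChildFieldDataTests and the nested-sorting helper above) move SortField construction into the field-data layer: instead of building an IndexFieldData.XFieldComparatorSource and wrapping it in new SortField(field, comparator, reverse) by hand, tests now call indexFieldData.sortField(missingValue, sortMode, nested, reverse) and get a ready-made SortField back, presumably so that missing-value and nested handling are decided in one place. A self-contained sketch of consuming such a SortField against plain Lucene; the in-memory index and the "value" field are illustrative, not taken from the patch:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.store.RAMDirectory;

public final class SortFieldSketch {
    public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory(); // in-memory index, fine for a demo
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            for (long value : new long[] {3, 1, 2}) {
                Document doc = new Document();
                doc.add(new NumericDocValuesField("value", value));
                writer.addDocument(doc);
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            // In the tests, this SortField now comes from IndexFieldData.sortField(...);
            // here we build an equivalent one by hand for a plain numeric doc-values field.
            Sort sort = new Sort(new SortField("value", SortField.Type.LONG, false));
            TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 3, sort);
            System.out.println("hits=" + topDocs.totalHits + ", first doc=" + topDocs.scoreDocs[0].doc);
        }
    }
}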
- final String extraId = "extra_" + randomAsciiOfLength(5); + final String extraId = "extra_" + randomAlphaOfLength(5); // first check that adding it without the master blessing doesn't change anything. tracker.updateLocalCheckpoint(extraId, maxLocalCheckpoint + 1 + randomInt(4)); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index 97c96c8af12..d203832fb15 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -213,7 +213,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { public void testIndexDirIsDeletedWhenShardRemoved() throws Exception { Environment env = getInstanceFromNode(Environment.class); - Path idxPath = env.sharedDataFile().resolve(randomAsciiOfLength(10)); + Path idxPath = env.sharedDataFile().resolve(randomAlphaOfLength(10)); logger.info("--> idxPath: [{}]", idxPath); Settings idxSettings = Settings.builder() .put(IndexMetaData.SETTING_DATA_PATH, idxPath) @@ -246,10 +246,10 @@ public class IndexShardIT extends ESSingleNodeTestCase { public void testIndexCanChangeCustomDataPath() throws Exception { Environment env = getInstanceFromNode(Environment.class); - Path idxPath = env.sharedDataFile().resolve(randomAsciiOfLength(10)); + Path idxPath = env.sharedDataFile().resolve(randomAlphaOfLength(10)); final String INDEX = "idx"; - Path startDir = idxPath.resolve("start-" + randomAsciiOfLength(10)); - Path endDir = idxPath.resolve("end-" + randomAsciiOfLength(10)); + Path startDir = idxPath.resolve("start-" + randomAlphaOfLength(10)); + Path endDir = idxPath.resolve("end-" + randomAlphaOfLength(10)); logger.info("--> start dir: [{}]", startDir.toAbsolutePath().toString()); logger.info("--> end dir: [{}]", endDir.toAbsolutePath().toString()); // temp dirs are automatically created, but the end dir is what @@ -363,49 +363,105 @@ public class IndexShardIT extends ESSingleNodeTestCase { assertEquals(0, shard.getEngine().getTranslog().totalOperations()); } - public void testStressMaybeFlush() throws Exception { + public void testMaybeRollTranslogGeneration() throws Exception { + final int generationThreshold = randomIntBetween(64, 512); + final Settings settings = + Settings + .builder() + .put("index.number_of_shards", 1) + .put("index.translog.generation_threshold_size", generationThreshold + "b") + .put() + .build(); + createIndex("test", settings); + ensureGreen("test"); + final IndicesService indicesService = getInstanceFromNode(IndicesService.class); + final IndexService test = indicesService.indexService(resolveIndex("test")); + final IndexShard shard = test.getShardOrNull(0); + int rolls = 0; + final Translog translog = shard.getEngine().getTranslog(); + final long generation = translog.currentFileGeneration(); + final int numberOfDocuments = randomIntBetween(32, 128); + for (int i = 0; i < numberOfDocuments; i++) { + assertThat(translog.currentFileGeneration(), equalTo(generation + rolls)); + final ParsedDocument doc = testParsedDocument( + "1", + "test", + null, + SequenceNumbersService.UNASSIGNED_SEQ_NO, + new ParseContext.Document(), + new BytesArray(new byte[]{1}), XContentType.JSON, null); + final Engine.Index index = new Engine.Index(new Term("_uid", doc.uid()), doc); + final Engine.IndexResult result = shard.index(index); + final Translog.Location location = result.getTranslogLocation(); + shard.afterWriteOperation(); + if (location.translogLocation + 
location.size > generationThreshold) { + // wait until the roll completes + assertBusy(() -> assertFalse(shard.shouldRollTranslogGeneration())); + rolls++; + assertThat(translog.currentFileGeneration(), equalTo(generation + rolls)); + } + } + } + + public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put( - IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), - new ByteSizeValue(117/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); - client().prepareIndex("test", "test", "0").setSource("{}", XContentType.JSON) - .setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE).get(); + final String key; + final boolean flush = randomBoolean(); + if (flush) { + key = "index.translog.flush_threshold_size"; + } else { + key = "index.translog.generation_threshold_size"; + } + // size of the operation plus header and footer + final Settings settings = Settings.builder().put(key, "117b").build(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get(); + client().prepareIndex("test", "test", "0") + .setSource("{}", XContentType.JSON) + .setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE) + .get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); final int numThreads = randomIntBetween(2, 4); - Thread[] threads = new Thread[numThreads]; - CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); + final Thread[] threads = new Thread[numThreads]; + final CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); for (int i = 0; i < threads.length; i++) { - threads[i] = new Thread() { - @Override - public void run() { - try { - barrier.await(); - } catch (InterruptedException | BrokenBarrierException e) { - throw new RuntimeException(e); - } - while (running.get()) { - shard.maybeFlush(); - } + threads[i] = new Thread(() -> { + try { + barrier.await(); + } catch (final InterruptedException | BrokenBarrierException e) { + throw new RuntimeException(e); } - }; + while (running.get()) { + shard.afterWriteOperation(); + } + }); threads[i].start(); } barrier.await(); - FlushStats flushStats = shard.flushStats(); - long total = flushStats.getTotal(); - client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); - assertBusy(() -> assertEquals(total + 1, shard.flushStats().getTotal())); + final Runnable check; + if (flush) { + final FlushStats flushStats = shard.flushStats(); + final long total = flushStats.getTotal(); + client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); + check = () -> assertEquals(total + 1, shard.flushStats().getTotal()); + } else { + final long generation = shard.getEngine().getTranslog().currentFileGeneration(); + client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); + check = () -> assertEquals( + generation + 1, + shard.getEngine().getTranslog().currentFileGeneration()); + } + assertBusy(check); running.set(false); for (int i = 0; i < threads.length; i++) { threads[i].join(); } - assertEquals(total + 1, shard.flushStats().getTotal()); + check.run(); } public void testShardHasMemoryBufferOnTranslogRecover() throws 
Throwable { diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java index 2eb91a16d80..88d8a075e1b 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java @@ -44,7 +44,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ AtomicInteger preDelete = new AtomicInteger(); AtomicInteger postDelete = new AtomicInteger(); AtomicInteger postDeleteException = new AtomicInteger(); - ShardId randomShardId = new ShardId(new Index(randomAsciiOfLength(10), randomAsciiOfLength(10)), randomIntBetween(1, 10)); + ShardId randomShardId = new ShardId(new Index(randomAlphaOfLength(10), randomAlphaOfLength(10)), randomIntBetween(1, 10)); IndexingOperationListener listener = new IndexingOperationListener() { @Override public Engine.Index preIndex(ShardId shardId, Engine.Index operation) { diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java index dd05c10af96..d7f6d147604 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardIdTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.test.ESTestCase; public class ShardIdTests extends ESTestCase { public void testShardIdFromString() { - String indexName = randomAsciiOfLengthBetween(3,50); + String indexName = randomAlphaOfLengthBetween(3,50); int shardId = randomInt(); ShardId id = ShardId.fromString("["+indexName+"]["+shardId+"]"); assertEquals(indexName, id.getIndexName()); diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index 653eea4196c..e9183876aec 100644 --- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.similarity; +import org.apache.lucene.search.similarities.BooleanSimilarity; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.DFISimilarity; import org.apache.lucene.search.similarities.AfterEffectL; @@ -64,6 +65,7 @@ public class SimilarityTests extends ESSingleNodeTestCase { SimilarityService similarityService = createIndex("foo").similarityService(); assertThat(similarityService.getSimilarity("classic").get(), instanceOf(ClassicSimilarity.class)); assertThat(similarityService.getSimilarity("BM25").get(), instanceOf(BM25Similarity.class)); + assertThat(similarityService.getSimilarity("boolean").get(), instanceOf(BooleanSimilarity.class)); assertThat(similarityService.getSimilarity("default"), equalTo(null)); } @@ -109,6 +111,21 @@ public class SimilarityTests extends ESSingleNodeTestCase { assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } + public void testResolveSimilaritiesFromMapping_boolean() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1").field("type", "text").field("similarity", "boolean").endObject() + .endObject() + .endObject().endObject().string(); + + IndexService indexService = createIndex("foo", Settings.EMPTY); + DocumentMapper documentMapper 
= indexService.mapperService() + .documentMapperParser() + .parse("type", new CompressedXContent(mapping)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), + instanceOf(BooleanSimilarityProvider.class)); + } + public void testResolveSimilaritiesFromMapping_DFR() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java index 2733d80e36a..5f1578488cf 100644 --- a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java +++ b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java @@ -48,7 +48,7 @@ public class FileInfoTests extends ESTestCase { for (int i = 0; i < hash.length; i++) { hash.bytes[i] = randomByte(); } - StoreFileMetaData meta = new StoreFileMetaData("foobar", Math.abs(randomLong()), randomAsciiOfLengthBetween(1, 10), Version.LATEST, hash); + StoreFileMetaData meta = new StoreFileMetaData("foobar", Math.abs(randomLong()), randomAlphaOfLengthBetween(1, 10), Version.LATEST, hash); ByteSizeValue size = new ByteSizeValue(Math.abs(randomLong())); BlobStoreIndexShardSnapshot.FileInfo info = new BlobStoreIndexShardSnapshot.FileInfo("_foobar", meta, size); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index e47a5652b24..6b2aa5e5921 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -41,16 +41,18 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine.Operation.Origin; @@ -100,6 +102,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; +import static org.elasticsearch.common.util.BigArrays.NON_RECYCLING_INSTANCE; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -156,12 +159,25 @@ public class TranslogTests extends ESTestCase { return new Translog(getTranslogConfig(path), null, () -> globalCheckpoint.get()); } - private TranslogConfig 
getTranslogConfig(Path path) { - Settings build = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .build(); - ByteSizeValue bufferSize = randomBoolean() ? TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); - return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.getIndex(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); + private TranslogConfig getTranslogConfig(final Path path) { + final Settings settings = Settings + .builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) + .build(); + return getTranslogConfig(path, settings); + } + + private TranslogConfig getTranslogConfig(final Path path, final Settings settings) { + final ByteSizeValue bufferSize; + if (randomBoolean()) { + bufferSize = TranslogConfig.DEFAULT_BUFFER_SIZE; + } else { + bufferSize = new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); + } + + final IndexSettings indexSettings = + IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings); + return new TranslogConfig(shardId, path, indexSettings, NON_RECYCLING_INSTANCE, bufferSize); } protected void addToTranslogAndList(Translog translog, ArrayList list, Translog.Operation op) throws IOException { @@ -242,7 +258,7 @@ public class TranslogTests extends ESTestCase { final long seqNo = randomNonNegativeLong(); final long primaryTerm = randomNonNegativeLong(); - final String reason = randomAsciiOfLength(16); + final String reason = randomAlphaOfLength(16); addToTranslogAndList(translog, ops, new Translog.NoOp(seqNo, primaryTerm, reason)); snapshot = translog.newSnapshot(); @@ -322,7 +338,7 @@ public class TranslogTests extends ESTestCase { final long seqNo = 1; final long primaryTerm = 1; - translog.add(new Translog.NoOp(seqNo, primaryTerm, randomAsciiOfLength(16))); + translog.add(new Translog.NoOp(seqNo, primaryTerm, randomAlphaOfLength(16))); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4L)); @@ -547,7 +563,7 @@ public class TranslogTests extends ESTestCase { int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { - String ascii = randomAsciiOfLengthBetween(1, 50); + String ascii = randomAlphaOfLengthBetween(1, 50); locations.add(translog.add(new Translog.Index("test", "" + op, ascii.getBytes("UTF-8")))); } translog.sync(); @@ -573,7 +589,7 @@ public class TranslogTests extends ESTestCase { int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { - String ascii = randomAsciiOfLengthBetween(1, 50); + String ascii = randomAlphaOfLengthBetween(1, 50); locations.add(translog.add(new Translog.Index("test", "" + op, ascii.getBytes("UTF-8")))); } translog.sync(); @@ -1390,7 +1406,7 @@ public class TranslogTests extends ESTestCase { randomFrom(VersionType.values())); break; case NO_OP: - op = new Translog.NoOp(randomNonNegativeLong(), randomNonNegativeLong(), randomAsciiOfLength(16)); + op = new Translog.NoOp(randomNonNegativeLong(), randomNonNegativeLong(), randomAlphaOfLength(16)); break; default: throw new AssertionError("unsupported operation type [" + type + "]"); @@ -2073,4 +2089,93 @@ public class TranslogTests extends ESTestCase { Translog.Delete serializedDelete = new Translog.Delete(in); assertEquals(delete, serializedDelete); } + + public void testRollGeneration() throws IOException { + final long generation = 
translog.currentFileGeneration(); + final int rolls = randomIntBetween(1, 16); + int totalOperations = 0; + int seqNo = 0; + for (int i = 0; i < rolls; i++) { + final int operations = randomIntBetween(1, 128); + for (int j = 0; j < operations; j++) { + translog.add(new Translog.NoOp(seqNo++, 0, "test")); + totalOperations++; + } + try (ReleasableLock ignored = translog.writeLock.acquire()) { + translog.rollGeneration(); + } + assertThat(translog.currentFileGeneration(), equalTo(generation + i + 1)); + assertThat(translog.totalOperations(), equalTo(totalOperations)); + } + for (int i = 0; i <= rolls; i++) { + assertFileIsPresent(translog, generation + i); + } + translog.commit(); + assertThat(translog.currentFileGeneration(), equalTo(generation + rolls + 1)); + assertThat(translog.totalOperations(), equalTo(0)); + for (int i = 0; i <= rolls; i++) { + assertFileDeleted(translog, generation + i); + } + assertFileIsPresent(translog, generation + rolls + 1); + } + + public void testRollGenerationBetweenPrepareCommitAndCommit() throws IOException { + final long generation = translog.currentFileGeneration(); + int seqNo = 0; + + final int rollsBefore = randomIntBetween(0, 16); + for (int r = 1; r <= rollsBefore; r++) { + final int operationsBefore = randomIntBetween(1, 256); + for (int i = 0; i < operationsBefore; i++) { + translog.add(new Translog.NoOp(seqNo++, 0, "test")); + } + + try (Releasable ignored = translog.writeLock.acquire()) { + translog.rollGeneration(); + } + + assertThat(translog.currentFileGeneration(), equalTo(generation + r)); + for (int i = 0; i <= r; i++) { + assertFileIsPresent(translog, generation + r); + } + } + + assertThat(translog.currentFileGeneration(), equalTo(generation + rollsBefore)); + translog.prepareCommit(); + assertThat(translog.currentFileGeneration(), equalTo(generation + rollsBefore + 1)); + + for (int i = 0; i <= rollsBefore + 1; i++) { + assertFileIsPresent(translog, generation + i); + } + + final int rollsBetween = randomIntBetween(0, 16); + for (int r = 1; r <= rollsBetween; r++) { + final int operationsBetween = randomIntBetween(1, 256); + for (int i = 0; i < operationsBetween; i++) { + translog.add(new Translog.NoOp(seqNo++, 0, "test")); + } + + try (Releasable ignored = translog.writeLock.acquire()) { + translog.rollGeneration(); + } + + assertThat( + translog.currentFileGeneration(), + equalTo(generation + rollsBefore + 1 + r)); + for (int i = 0; i <= rollsBefore + 1 + r; i++) { + assertFileIsPresent(translog, generation + i); + } + } + + translog.commit(); + + for (int i = 0; i <= rollsBefore; i++) { + assertFileDeleted(translog, generation + i); + } + for (int i = rollsBefore + 1; i <= rollsBefore + 1 + rollsBetween; i++) { + assertFileIsPresent(translog, generation + i); + } + + } + } diff --git a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java index 9b5cb7a138d..63f889179a2 100644 --- a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java @@ -195,7 +195,7 @@ public class IndexActionIT extends ESIntegTestCase { int min = MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES + 1; int max = MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES * 2; try { - createIndex(randomAsciiOfLengthBetween(min, max).toLowerCase(Locale.ROOT)); + createIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT)); fail("exception should have been thrown on too-long index name"); } catch 
(InvalidIndexNameException e) { assertThat("exception contains message about index name too long: " + e.getMessage(), @@ -203,7 +203,7 @@ public class IndexActionIT extends ESIntegTestCase { } try { - client().prepareIndex(randomAsciiOfLengthBetween(min, max).toLowerCase(Locale.ROOT), "mytype").setSource("foo", "bar").get(); + client().prepareIndex(randomAlphaOfLengthBetween(min, max).toLowerCase(Locale.ROOT), "mytype").setSource("foo", "bar").get(); fail("exception should have been thrown on too-long index name"); } catch (InvalidIndexNameException e) { assertThat("exception contains message about index name too long: " + e.getMessage(), @@ -212,7 +212,7 @@ public class IndexActionIT extends ESIntegTestCase { try { // Catch chars that are more than a single byte - client().prepareIndex(randomAsciiOfLength(MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES - 1).toLowerCase(Locale.ROOT) + + client().prepareIndex(randomAlphaOfLength(MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES - 1).toLowerCase(Locale.ROOT) + "Ϟ".toLowerCase(Locale.ROOT), "mytype").setSource("foo", "bar").get(); fail("exception should have been thrown on too-long index name"); @@ -222,7 +222,7 @@ public class IndexActionIT extends ESIntegTestCase { } // we can create an index of max length - createIndex(randomAsciiOfLength(MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES).toLowerCase(Locale.ROOT)); + createIndex(randomAlphaOfLength(MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES).toLowerCase(Locale.ROOT)); } public void testInvalidIndexName() { diff --git a/core/src/test/java/org/elasticsearch/indices/NodeIndicesStatsTests.java b/core/src/test/java/org/elasticsearch/indices/NodeIndicesStatsTests.java index f712a5ba843..b8ae55a44d2 100644 --- a/core/src/test/java/org/elasticsearch/indices/NodeIndicesStatsTests.java +++ b/core/src/test/java/org/elasticsearch/indices/NodeIndicesStatsTests.java @@ -31,7 +31,7 @@ public class NodeIndicesStatsTests extends ESTestCase { public void testInvalidLevel() { final NodeIndicesStats stats = new NodeIndicesStats(); - final String level = randomAsciiOfLength(16); + final String level = randomAlphaOfLength(16); final ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap("level", level)); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> stats.toXContent(null, params)); assertThat( diff --git a/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java b/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java index fea69133377..07fe5e00a24 100644 --- a/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java +++ b/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.indices; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -30,11 +29,11 @@ import static org.hamcrest.Matchers.containsString; public class TermsLookupTests extends ESTestCase { public void testTermsLookup() { - String index = randomAsciiOfLengthBetween(1, 10); - String type = randomAsciiOfLengthBetween(1, 10); - String id = randomAsciiOfLengthBetween(1, 10); - String path = randomAsciiOfLengthBetween(1, 10); - String routing = randomAsciiOfLengthBetween(1, 10); + String index = randomAlphaOfLengthBetween(1, 10); + String type = randomAlphaOfLengthBetween(1, 10); + String id = randomAlphaOfLengthBetween(1, 
10); + String path = randomAlphaOfLengthBetween(1, 10); + String routing = randomAlphaOfLengthBetween(1, 10); TermsLookup termsLookup = new TermsLookup(index, type, id, path); termsLookup.routing(routing); assertEquals(index, termsLookup.index()); @@ -45,9 +44,9 @@ public class TermsLookupTests extends ESTestCase { } public void testIllegalArguments() { - String type = randomAsciiOfLength(5); - String id = randomAsciiOfLength(5); - String path = randomAsciiOfLength(5); + String type = randomAlphaOfLength(5); + String id = randomAlphaOfLength(5); + String path = randomAlphaOfLength(5); switch (randomIntBetween(0, 2)) { case 0: type = null; @@ -81,10 +80,10 @@ public class TermsLookupTests extends ESTestCase { public static TermsLookup randomTermsLookup() { return new TermsLookup( - randomBoolean() ? randomAsciiOfLength(10) : null, - randomAsciiOfLength(10), - randomAsciiOfLength(10), - randomAsciiOfLength(10).replace('.', '_') - ).routing(randomBoolean() ? randomAsciiOfLength(10) : null); + randomBoolean() ? randomAlphaOfLength(10) : null, + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10).replace('.', '_') + ).routing(randomBoolean() ? randomAlphaOfLength(10) : null); } } diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index 5186480d5cd..d6e93ce559e 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -54,7 +54,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { List indexNames = new ArrayList<>(); final int numIndices = scaledRandomIntBetween(2, 4); for (int i = 0; i < numIndices; i++) { - String indexName = randomAsciiOfLength(10).toLowerCase(Locale.ROOT); + String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); indexNames.add(indexName); int randomInt = randomInt(PreBuiltAnalyzers.values().length-1); @@ -91,7 +91,7 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { String randomId = randomInt() + ""; Map data = new HashMap<>(); - data.put("foo", randomAsciiOfLength(scaledRandomIntBetween(5, 50))); + data.put("foo", randomAlphaOfLength(scaledRandomIntBetween(5, 50))); index(randomIndex, "type", randomId, data); } diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 37f6c989229..064e9d78b51 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -159,7 +159,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice */ public void testJoiningNewClusterOnlyRemovesInMemoryIndexStructures() { // a cluster state derived from the initial state that includes a created index - String name = "index_" + randomAsciiOfLength(8).toLowerCase(Locale.ROOT); + String name = "index_" + randomAlphaOfLength(8).toLowerCase(Locale.ROOT); ShardRoutingState[] replicaStates = new ShardRoutingState[randomIntBetween(0, 3)]; Arrays.fill(replicaStates, ShardRoutingState.INITIALIZING); ClusterState stateWithIndex = ClusterStateCreationUtils.state(name, randomBoolean(), 
ShardRoutingState.INITIALIZING, replicaStates); @@ -245,7 +245,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice if (state.metaData().indices().size() > 200) { break; } - String name = "index_" + randomAsciiOfLength(15).toLowerCase(Locale.ROOT); + String name = "index_" + randomAlphaOfLength(15).toLowerCase(Locale.ROOT); Settings.Builder settingsBuilder = Settings.builder() .put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 3)) .put(SETTING_NUMBER_OF_REPLICAS, randomInt(2)); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 48c8c2e5f28..a21f7757eac 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -531,7 +531,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { docs[i] = client().prepareIndex(name, INDEX_TYPE). setSource("foo-int", randomInt(), - "foo-string", randomAsciiOfLength(32), + "foo-string", randomAlphaOfLength(32), "foo-float", randomFloat()); } diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java index 1245725e055..4f893c946ec 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java @@ -521,7 +521,7 @@ public class RecoveryTargetTests extends ESTestCase { @Override public void run() { for (int i = 0; i < 1000; i++) { - index.addFileDetail(randomAsciiOfLength(10), 100, true); + index.addFileDetail(randomAlphaOfLength(10), 100, true); } stop.set(true); } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index ae6b4588271..188498b56d0 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -44,6 +44,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -66,7 +67,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Arrays.asList(DummySettingPlugin.class); + return Arrays.asList(DummySettingPlugin.class, FinalSettingPlugin.class); } public static class DummySettingPlugin extends Plugin { @@ -86,6 +87,19 @@ public class UpdateSettingsIT extends ESIntegTestCase { } } + public static class FinalSettingPlugin extends Plugin { + public static final Setting<String> FINAL_SETTING = Setting.simpleString("index.final", + Setting.Property.IndexScope, Setting.Property.Final); + @Override + public void onIndexModule(IndexModule indexModule) { + } + + @Override + public List<Setting<?>> getSettings() { + return Collections.singletonList(FINAL_SETTING); + } + } + public void testResetDefault() { createIndex("test"); @@ -93,7 +107,11 @@ public
class UpdateSettingsIT extends ESIntegTestCase { .admin() .indices() .prepareUpdateSettings("test") - .setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.translog.flush_threshold_size", "1024b")) + .setSettings( + Settings.builder() + .put("index.refresh_interval", -1) + .put("index.translog.flush_threshold_size", "1024b") + .put("index.translog.generation_threshold_size", "4096b")) .execute() .actionGet(); IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); @@ -103,6 +121,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { if (indexService != null) { assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), -1); assertEquals(indexService.getIndexSettings().getFlushThresholdSize().getBytes(), 1024); + assertEquals(indexService.getIndexSettings().getGenerationThresholdSize().getBytes(), 4096); } } client() @@ -119,12 +138,13 @@ public class UpdateSettingsIT extends ESIntegTestCase { if (indexService != null) { assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), 1000); assertEquals(indexService.getIndexSettings().getFlushThresholdSize().getBytes(), 1024); + assertEquals(indexService.getIndexSettings().getGenerationThresholdSize().getBytes(), 4096); } } } public void testOpenCloseUpdateSettings() throws Exception { createIndex("test"); - try { + expectThrows(IllegalArgumentException.class, () -> client() .admin() .indices() @@ -133,20 +153,29 @@ public class UpdateSettingsIT extends ESIntegTestCase { .put("index.refresh_interval", -1) // this one can change .put("index.fielddata.cache", "none")) // this one can't .execute() - .actionGet(); - fail(); - } catch (IllegalArgumentException e) { - // all is well - } - + .actionGet() + ); + expectThrows(IllegalArgumentException.class, () -> + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder() + .put("index.refresh_interval", -1) // this one can change + .put("index.final", "no")) // this one can't + .execute() + .actionGet() + ); IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), nullValue()); assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), nullValue()); + assertThat(indexMetaData.getSettings().get("index.final"), nullValue()); // Now verify via dedicated get settings api: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), nullValue()); assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), nullValue()); + assertThat(getSettingsResponse.getSetting("test", "index.final"), nullValue()); client() .admin() @@ -204,10 +233,27 @@ public class UpdateSettingsIT extends ESIntegTestCase { assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("1s")); assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), equalTo("none")); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder() + .put("index.refresh_interval", -1) // this one can change + .put("index.final", "no")) // this one really can't + .execute() + .actionGet() + ); + assertThat(ex.getMessage(), containsString("final test 
setting [index.final], not updateable")); + indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("1s")); + assertThat(indexMetaData.getSettings().get("index.final"), nullValue()); + + // Now verify via dedicated get settings api: getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), equalTo("1s")); - assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), equalTo("none")); + assertThat(getSettingsResponse.getSetting("test", "index.final"), nullValue()); } public void testEngineGCDeletesSetting() throws InterruptedException { diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java index 7f0e9350488..1c091d6b015 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java @@ -20,38 +20,22 @@ package org.elasticsearch.indices.store; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.junit.After; -import org.junit.AfterClass; import org.junit.Before; -import org.junit.BeforeClass; import java.util.Arrays; import java.util.HashSet; import java.util.Set; -import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.elasticsearch.Version.CURRENT; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.test.VersionUtils.randomVersion; public class IndicesStoreTests extends ESTestCase { private static final ShardRoutingState[] NOT_STARTED_STATES; @@ -94,8 +78,8 @@ public class IndicesStoreTests extends ESTestCase { if (state == ShardRoutingState.UNASSIGNED) { unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null); } - String relocatingNodeId = state == ShardRoutingState.RELOCATING ? randomAsciiOfLength(10) : null; - routingTable.addShard(TestShardRouting.newShardRouting("test", i, randomAsciiOfLength(10), relocatingNodeId, j == 0, state, unassignedInfo)); + String relocatingNodeId = state == ShardRoutingState.RELOCATING ? 
randomAlphaOfLength(10) : null; + routingTable.addShard(TestShardRouting.newShardRouting("test", i, randomAlphaOfLength(10), relocatingNodeId, j == 0, state, unassignedInfo)); } } @@ -111,10 +95,10 @@ public class IndicesStoreTests extends ESTestCase { for (int i = 0; i < numShards; i++) { int localNodeIndex = randomInt(numReplicas); boolean primaryOnLocalNode = i == localShardId && localNodeIndex == numReplicas; - routingTable.addShard(TestShardRouting.newShardRouting("test", i, primaryOnLocalNode ? localNode.getId() : randomAsciiOfLength(10), true, ShardRoutingState.STARTED)); + routingTable.addShard(TestShardRouting.newShardRouting("test", i, primaryOnLocalNode ? localNode.getId() : randomAlphaOfLength(10), true, ShardRoutingState.STARTED)); for (int j = 0; j < numReplicas; j++) { boolean replicaOnLocalNode = i == localShardId && localNodeIndex == j; - routingTable.addShard(TestShardRouting.newShardRouting("test", i, replicaOnLocalNode ? localNode.getId() : randomAsciiOfLength(10), false, ShardRoutingState.STARTED)); + routingTable.addShard(TestShardRouting.newShardRouting("test", i, replicaOnLocalNode ? localNode.getId() : randomAlphaOfLength(10), false, ShardRoutingState.STARTED)); } } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 1bd5676f474..e1ec12e29b6 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -912,12 +912,12 @@ public class IngestDocumentTests extends ESTestCase { Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); int numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); } Map ingestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { - ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + ingestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } IngestDocument ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata); @@ -932,7 +932,7 @@ public class IngestDocumentTests extends ESTestCase { if (randomBoolean()) { numFields = randomIntBetween(1, IngestDocument.MetaData.values().length); for (int i = 0; i < numFields; i++) { - otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10)); + otherSourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAlphaOfLengthBetween(5, 10)); } changed = true; } @@ -942,7 +942,7 @@ public class IngestDocumentTests extends ESTestCase { otherIngestMetadata = new HashMap<>(); numFields = randomIntBetween(1, 5); for (int i = 0; i < numFields; i++) { - otherIngestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)); + otherIngestMetadata.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)); } changed = true; } else { diff --git a/core/src/test/java/org/elasticsearch/monitor/fs/DeviceStatsTests.java b/core/src/test/java/org/elasticsearch/monitor/fs/DeviceStatsTests.java index 758f67ff563..85aeed9485f 100644 --- 
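Most of the mechanical churn across these test files is the rename of `randomAsciiOfLength*` to `randomAlphaOfLength*`; both produce random strings of ASCII letters, and the new name simply says so. A toy stand-in for what such a helper generates, assuming only `java.util.Random` (the real one comes from the randomized-testing infrastructure and draws from a repeatable per-test random context):

```java
import java.util.Random;

public class RandomAlphaSketch {
    // Toy equivalent of randomAlphaOfLength(n): n random ASCII letters a-z/A-Z.
    static String randomAlphaOfLength(Random random, int length) {
        StringBuilder sb = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            char base = random.nextBoolean() ? 'a' : 'A';
            sb.append((char) (base + random.nextInt(26)));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(randomAlphaOfLength(new Random(42L), 10));
    }
}
```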
a/core/src/test/java/org/elasticsearch/monitor/fs/DeviceStatsTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/fs/DeviceStatsTests.java @@ -28,7 +28,7 @@ public class DeviceStatsTests extends ESTestCase { public void testDeviceStats() { final int majorDeviceNumber = randomIntBetween(1, 1 << 8); final int minorDeviceNumber = randomIntBetween(0, 1 << 5); - final String deviceName = randomAsciiOfLength(3); + final String deviceName = randomAlphaOfLength(3); final int readsCompleted = randomIntBetween(1, 1 << 16); final int sectorsRead = randomIntBetween(8 * readsCompleted, 16 * readsCompleted); final int writesCompleted = randomIntBetween(1, 1 << 16); diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java index dc374f13339..48817e52d56 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java @@ -55,7 +55,7 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { } public void testNegativeSetting() throws InterruptedException { - String collector = randomAsciiOfLength(5); + String collector = randomAlphaOfLength(5); Settings settings = Settings.builder().put("monitor.jvm.gc.collector." + collector + ".warn", "-" + randomTimeValue()).build(); execute(settings, (command, interval, name) -> null, e -> { assertThat(e, instanceOf(IllegalArgumentException.class)); @@ -64,7 +64,7 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { } public void testMissingSetting() throws InterruptedException { - String collector = randomAsciiOfLength(5); + String collector = randomAlphaOfLength(5); Set> entries = new HashSet<>(); entries.add(new AbstractMap.SimpleEntry<>("monitor.jvm.gc.collector." + collector + ".warn", randomPositiveTimeValue())); entries.add(new AbstractMap.SimpleEntry<>("monitor.jvm.gc.collector." 
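`testNegativeSetting` above expects an `IllegalArgumentException` when a collector threshold such as `monitor.jvm.gc.collector.<name>.warn` is set to a negated time value. Roughly, the validation boils down to parsing the duration and rejecting non-positive results; the parsing and message below are a simplified guess, not the actual Settings machinery:

```java
import java.util.Locale;

public class GcThresholdSketch {
    // Parses "500ms"/"2s" style values and rejects non-positive ones; units and
    // error wording here are invented for illustration.
    static long parseMillis(String setting, String value) {
        String v = value.toLowerCase(Locale.ROOT).trim();
        long scale = v.endsWith("ms") ? 1 : v.endsWith("s") ? 1000 : -1;
        String num = v.endsWith("ms") ? v.substring(0, v.length() - 2)
                                      : v.endsWith("s") ? v.substring(0, v.length() - 1) : v;
        if (scale < 0) {
            throw new IllegalArgumentException("unknown unit in [" + setting + "]");
        }
        long millis = Long.parseLong(num) * scale;
        if (millis <= 0) {
            throw new IllegalArgumentException(
                "invalid value [" + value + "] for [" + setting + "], must be positive");
        }
        return millis;
    }

    public static void main(String[] args) {
        System.out.println(parseMillis("monitor.jvm.gc.collector.cms.warn", "500ms")); // 500
        try {
            parseMillis("monitor.jvm.gc.collector.cms.warn", "-10s");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // the negative case the test exercises
        }
    }
}
```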
+ collector + ".info", randomPositiveTimeValue())); diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java index ca3a1d7070d..711988d5a82 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceTests.java @@ -37,7 +37,7 @@ public class JvmGcMonitorServiceTests extends ESTestCase { when(logger.isInfoEnabled()).thenReturn(true); when(logger.isDebugEnabled()).thenReturn(true); final JvmGcMonitorService.JvmMonitor.Threshold threshold = randomFrom(JvmGcMonitorService.JvmMonitor.Threshold.values()); - final String name = randomAsciiOfLength(16); + final String name = randomAlphaOfLength(16); final long seq = randomIntBetween(1, 1 << 30); final int elapsedValue = randomIntBetween(1, 1 << 10); final long totalCollectionCount = randomIntBetween(1, 16); diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java index 4cceddb6a96..71305c41f56 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsProbeTests.java @@ -148,7 +148,7 @@ public class OsProbeTests extends ESTestCase { assumeTrue("test runs on Linux only", Constants.LINUX); final boolean areCgroupStatsAvailable = randomBoolean(); - final String hierarchy = randomAsciiOfLength(16); + final String hierarchy = randomAlphaOfLength(16); final OsProbe probe = new OsProbe() { diff --git a/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java b/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java index 9f8b60a55a9..f1e2371db5c 100644 --- a/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/os/OsStatsTests.java @@ -37,9 +37,9 @@ public class OsStatsTests extends ESTestCase { OsStats.Mem mem = new OsStats.Mem(randomLong(), randomLong()); OsStats.Swap swap = new OsStats.Swap(randomLong(), randomLong()); OsStats.Cgroup cgroup = new OsStats.Cgroup( - randomAsciiOfLength(8), + randomAlphaOfLength(8), randomNonNegativeLong(), - randomAsciiOfLength(8), + randomAlphaOfLength(8), randomNonNegativeLong(), randomNonNegativeLong(), new OsStats.Cgroup.CpuStat(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong())); diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 9c36a7a649a..f830e8b6272 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -113,9 +113,9 @@ public class NodeInfoStreamingTests extends ESTestCase { int availableProcessors = randomIntBetween(1, 64); int allocatedProcessors = randomIntBetween(1, availableProcessors); long refreshInterval = randomBoolean() ? 
-1 : randomNonNegativeLong(); - String name = randomAsciiOfLengthBetween(3, 10); - String arch = randomAsciiOfLengthBetween(3, 10); - String version = randomAsciiOfLengthBetween(3, 10); + String name = randomAlphaOfLengthBetween(3, 10); + String arch = randomAlphaOfLengthBetween(3, 10); + String version = randomAlphaOfLengthBetween(3, 10); osInfo = new OsInfo(refreshInterval, availableProcessors, allocatedProcessors, name, arch, version); } ProcessInfo process = randomBoolean() ? null : new ProcessInfo(randomInt(), randomBoolean(), randomNonNegativeLong()); @@ -125,7 +125,7 @@ public class NodeInfoStreamingTests extends ESTestCase { int numThreadPools = randomIntBetween(1, 10); List threadPoolInfos = new ArrayList<>(numThreadPools); for (int i = 0; i < numThreadPools; i++) { - threadPoolInfos.add(new ThreadPool.Info(randomAsciiOfLengthBetween(3, 10), + threadPoolInfos.add(new ThreadPool.Info(randomAlphaOfLengthBetween(3, 10), randomFrom(ThreadPool.ThreadPoolType.values()), randomInt())); } threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); @@ -142,14 +142,14 @@ public class NodeInfoStreamingTests extends ESTestCase { int numPlugins = randomIntBetween(0, 5); List plugins = new ArrayList<>(); for (int i = 0; i < numPlugins; i++) { - plugins.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), - randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + plugins.add(new PluginInfo(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), + randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), randomBoolean())); } int numModules = randomIntBetween(0, 5); List modules = new ArrayList<>(); for (int i = 0; i < numModules; i++) { - modules.add(new PluginInfo(randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10), - randomAsciiOfLengthBetween(3, 10), randomAsciiOfLengthBetween(3, 10))); + modules.add(new PluginInfo(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), + randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10), randomBoolean())); } pluginsAndModules = new PluginsAndModules(plugins, modules); } @@ -159,7 +159,7 @@ public class NodeInfoStreamingTests extends ESTestCase { int numProcessors = randomIntBetween(0, 5); List processors = new ArrayList<>(numProcessors); for (int i = 0; i < numProcessors; i++) { - processors.add(new ProcessorInfo(randomAsciiOfLengthBetween(3, 10))); + processors.add(new ProcessorInfo(randomAlphaOfLengthBetween(3, 10))); } ingestInfo = new IngestInfo(processors); } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index 4ad52be8866..04afdd58391 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -56,7 +56,7 @@ public class PluginInfoTests extends ESTestCase { PluginInfo.readFromProperties(pluginDir); fail("expected missing name exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Property [name] is missing in")); + assertTrue(e.getMessage().contains("property [name] is missing in")); } PluginTestUtil.writeProperties(pluginDir, "name", ""); @@ -64,7 +64,7 @@ public class PluginInfoTests extends ESTestCase { PluginInfo.readFromProperties(pluginDir); fail("expected missing name exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Property [name] is missing in")); + 
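The PluginInfoTests changes that follow track a wording change in the descriptor validation, with messages now starting lowercase ("property [name] is missing in ..."). A sketch of that kind of required-property check, with `java.util.Properties` standing in for the plugin descriptor file and the message abbreviated:

```java
import java.util.Properties;

public class PluginPropsSketch {
    // Fails with a lowercase "property [...] is missing" style message when a
    // required descriptor entry is absent or empty; descriptor name is invented.
    static String requireProperty(Properties props, String key, String descriptor) {
        String value = props.getProperty(key);
        if (value == null || value.isEmpty()) {
            throw new IllegalArgumentException(
                "property [" + key + "] is missing in [" + descriptor + "]");
        }
        return value;
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("description", "fake desc");
        try {
            requireProperty(props, "name", "plugin-descriptor.properties");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
```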
assertTrue(e.getMessage().contains("property [name] is missing in")); } } @@ -81,7 +81,8 @@ public class PluginInfoTests extends ESTestCase { public void testReadFromPropertiesVersionMissing() throws Exception { Path pluginDir = createTempDir().resolve("fake-plugin"); - PluginTestUtil.writeProperties(pluginDir, "description", "fake desc", "name", "fake-plugin"); + PluginTestUtil.writeProperties( + pluginDir, "description", "fake desc", "name", "fake-plugin"); try { PluginInfo.readFromProperties(pluginDir); fail("expected missing version exception"); @@ -151,7 +152,11 @@ public class PluginInfoTests extends ESTestCase { PluginInfo.readFromProperties(pluginDir); fail("expected bad java version format exception"); } catch (IllegalStateException e) { - assertTrue(e.getMessage(), e.getMessage().equals("version string must be a sequence of nonnegative decimal integers separated by \".\"'s and may have leading zeros but was 1.7.0_80")); + assertTrue( + e.getMessage(), + e.getMessage().equals("version string must be a sequence of nonnegative " + + "decimal integers separated by \".\"'s and may have leading zeros " + + "but was 1.7.0_80")); } } @@ -166,7 +171,8 @@ public class PluginInfoTests extends ESTestCase { PluginInfo.readFromProperties(pluginDir); fail("expected bogus elasticsearch version exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("version needs to contain major, minor, and revision")); + assertTrue(e.getMessage().contains( + "version needs to contain major, minor, and revision")); } } @@ -181,7 +187,7 @@ public class PluginInfoTests extends ESTestCase { PluginInfo.readFromProperties(pluginDir); fail("expected old elasticsearch version exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Was designed for version [2.0.0]")); + assertTrue(e.getMessage().contains("was designed for version [2.0.0]")); } } @@ -197,17 +203,17 @@ public class PluginInfoTests extends ESTestCase { PluginInfo.readFromProperties(pluginDir); fail("expected old elasticsearch version exception"); } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Property [classname] is missing")); + assertTrue(e.getMessage().contains("property [classname] is missing")); } } public void testPluginListSorted() { List plugins = new ArrayList<>(); - plugins.add(new PluginInfo("c", "foo", "dummy", "dummyclass")); - plugins.add(new PluginInfo("b", "foo", "dummy", "dummyclass")); - plugins.add(new PluginInfo("e", "foo", "dummy", "dummyclass")); - plugins.add(new PluginInfo("a", "foo", "dummy", "dummyclass")); - plugins.add(new PluginInfo("d", "foo", "dummy", "dummyclass")); + plugins.add(new PluginInfo("c", "foo", "dummy", "dummyclass", randomBoolean())); + plugins.add(new PluginInfo("b", "foo", "dummy", "dummyclass", randomBoolean())); + plugins.add(new PluginInfo("e", "foo", "dummy", "dummyclass", randomBoolean())); + plugins.add(new PluginInfo("a", "foo", "dummy", "dummyclass", randomBoolean())); + plugins.add(new PluginInfo("d", "foo", "dummy", "dummyclass", randomBoolean())); PluginsAndModules pluginsInfo = new PluginsAndModules(plugins, Collections.emptyList()); @@ -215,4 +221,5 @@ public class PluginInfoTests extends ESTestCase { List names = infos.stream().map(PluginInfo::getName).collect(Collectors.toList()); assertThat(names, contains("a", "b", "c", "d", "e")); } + } diff --git a/core/src/test/java/org/elasticsearch/bootstrap/SpawnerTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginsTests.java similarity index 
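`testPluginListSorted` asserts that the plugin listing comes back ordered by name regardless of insertion order. Reduced to its essence, names only, with a plain `Comparator` in place of whatever the real info classes implement:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class SortedPluginsSketch {
    public static void main(String[] args) {
        // The real test builds PluginInfo objects and compares getName();
        // bare strings are enough to show the ordering contract.
        List<String> names = new ArrayList<>(Arrays.asList("c", "b", "e", "a", "d"));
        names.sort(Comparator.naturalOrder());
        System.out.println(names); // [a, b, c, d, e] -- the order the test asserts
    }
}
```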
60% rename from core/src/test/java/org/elasticsearch/bootstrap/SpawnerTests.java rename to core/src/test/java/org/elasticsearch/plugins/PluginsTests.java index 58c112ba96d..a26bcb1991e 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/SpawnerTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginsTests.java @@ -17,20 +17,17 @@ * under the License. */ -package org.elasticsearch.bootstrap; +package org.elasticsearch.plugins; import org.apache.lucene.util.Constants; import org.elasticsearch.test.ESTestCase; import java.util.Locale; -/** - * Doesn't actually test spawning a process, as a system call filter is installed before tests run and forbids it. - */ -public class SpawnerTests extends ESTestCase { +public class PluginsTests extends ESTestCase { public void testMakePlatformName() { - String platformName = Spawner.makePlatformName(Constants.OS_NAME, Constants.OS_ARCH); + final String platformName = Platforms.platformName(Constants.OS_NAME, Constants.OS_ARCH); assertFalse(platformName, platformName.isEmpty()); assertTrue(platformName, platformName.equals(platformName.toLowerCase(Locale.ROOT))); @@ -40,13 +37,13 @@ public class SpawnerTests extends ESTestCase { } public void testMakeSpecificPlatformNames() { - assertEquals("darwin-x86_64", Spawner.makePlatformName("Mac OS X", "x86_64")); - assertEquals("linux-x86_64", Spawner.makePlatformName("Linux", "amd64")); - assertEquals("linux-x86", Spawner.makePlatformName("Linux", "i386")); - assertEquals("windows-x86_64", Spawner.makePlatformName("Windows Server 2008 R2", "amd64")); - assertEquals("windows-x86", Spawner.makePlatformName("Windows Server 2008", "x86")); - assertEquals("windows-x86_64", Spawner.makePlatformName("Windows 8.1", "amd64")); - assertEquals("sunos-x86_64", Spawner.makePlatformName("SunOS", "amd64")); + assertEquals("darwin-x86_64", Platforms.platformName("Mac OS X", "x86_64")); + assertEquals("linux-x86_64", Platforms.platformName("Linux", "amd64")); + assertEquals("linux-x86", Platforms.platformName("Linux", "i386")); + assertEquals("windows-x86_64", Platforms.platformName("Windows Server 2008 R2", "amd64")); + assertEquals("windows-x86", Platforms.platformName("Windows Server 2008", "x86")); + assertEquals("windows-x86_64", Platforms.platformName("Windows 8.1", "amd64")); + assertEquals("sunos-x86_64", Platforms.platformName("SunOS", "amd64")); } } diff --git a/core/src/test/java/org/elasticsearch/repositories/IndexIdTests.java b/core/src/test/java/org/elasticsearch/repositories/IndexIdTests.java index 9c39061419c..51167d862fd 100644 --- a/core/src/test/java/org/elasticsearch/repositories/IndexIdTests.java +++ b/core/src/test/java/org/elasticsearch/repositories/IndexIdTests.java @@ -36,7 +36,7 @@ public class IndexIdTests extends ESTestCase { public void testEqualsAndHashCode() { // assert equals and hashcode - String name = randomAsciiOfLength(8); + String name = randomAlphaOfLength(8); String id = UUIDs.randomBase64UUID(); IndexId indexId1 = new IndexId(name, id); IndexId indexId2 = new IndexId(name, id); @@ -49,7 +49,7 @@ public class IndexIdTests extends ESTestCase { assertEquals(indexId1, indexId2); assertEquals(indexId1.hashCode(), indexId2.hashCode()); //assert not equals when name or id differ - indexId2 = new IndexId(randomAsciiOfLength(8), id); + indexId2 = new IndexId(randomAlphaOfLength(8), id); assertNotEquals(indexId1, indexId2); assertNotEquals(indexId1.hashCode(), indexId2.hashCode()); indexId2 = new IndexId(name, UUIDs.randomBase64UUID()); @@ -58,14 +58,14 @@ public class 
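The renamed PluginsTests pins down the platform-name normalization: OS name and architecture are folded into a lowercase `os-arch` token, with `amd64` canonicalized to `x86_64` and `i386` to `x86`. A re-implementation written to satisfy exactly the `assertEquals` cases above; the real logic lives in `org.elasticsearch.plugins.Platforms` and may handle more cases:

```java
import java.util.Locale;

public class PlatformNameSketch {
    static String platformName(String osName, String osArch) {
        String os;
        if (osName.startsWith("Windows")) {
            os = "windows";                       // any Windows flavor collapses
        } else if (osName.equals("Mac OS X")) {
            os = "darwin";
        } else {
            os = osName.toLowerCase(Locale.ROOT); // Linux -> linux, SunOS -> sunos
        }
        String arch;
        if (osArch.equals("amd64") || osArch.equals("x86_64")) {
            arch = "x86_64";
        } else if (osArch.equals("i386") || osArch.equals("x86")) {
            arch = "x86";
        } else {
            arch = osArch;
        }
        return os + "-" + arch;
    }

    public static void main(String[] args) {
        System.out.println(platformName("Mac OS X", "x86_64"));              // darwin-x86_64
        System.out.println(platformName("Linux", "i386"));                   // linux-x86
        System.out.println(platformName("Windows Server 2008 R2", "amd64")); // windows-x86_64
        System.out.println(platformName("SunOS", "amd64"));                  // sunos-x86_64
    }
}
```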
IndexIdTests extends ESTestCase { } public void testSerialization() throws IOException { - IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()); + IndexId indexId = new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID()); BytesStreamOutput out = new BytesStreamOutput(); indexId.writeTo(out); assertEquals(indexId, new IndexId(out.bytes().streamInput())); } public void testXContent() throws IOException { - IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()); + IndexId indexId = new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID()); XContentBuilder builder = JsonXContent.contentBuilder(); indexId.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); diff --git a/core/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/core/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index f9c620e3b9e..6c548a38cb3 100644 --- a/core/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/core/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -68,12 +68,12 @@ public class RepositoryDataTests extends ESTestCase { // test that adding the same snapshot id to the repository data throws an exception Map indexIdMap = repositoryData.getIndices(); // test that adding a snapshot and its indices works - SnapshotId newSnapshot = new SnapshotId(randomAsciiOfLength(7), UUIDs.randomBase64UUID()); + SnapshotId newSnapshot = new SnapshotId(randomAlphaOfLength(7), UUIDs.randomBase64UUID()); List indices = new ArrayList<>(); Set newIndices = new HashSet<>(); int numNew = randomIntBetween(1, 10); for (int i = 0; i < numNew; i++) { - IndexId indexId = new IndexId(randomAsciiOfLength(7), UUIDs.randomBase64UUID()); + IndexId indexId = new IndexId(randomAlphaOfLength(7), UUIDs.randomBase64UUID()); newIndices.add(indexId); indices.add(indexId); } @@ -99,7 +99,7 @@ public class RepositoryDataTests extends ESTestCase { final int numSnapshots = randomIntBetween(1, 30); final List snapshotIds = new ArrayList<>(numSnapshots); for (int i = 0; i < numSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + snapshotIds.add(new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())); } RepositoryData repositoryData = new RepositoryData(EMPTY_REPO_GEN, snapshotIds, Collections.emptyMap(), Collections.emptyList()); // test that initializing indices works @@ -131,7 +131,7 @@ public class RepositoryDataTests extends ESTestCase { String indexName = indexNames.iterator().next(); IndexId indexId = indices.get(indexName); assertEquals(indexId, repositoryData.resolveIndexId(indexName)); - String notInRepoData = randomAsciiOfLength(5); + String notInRepoData = randomAlphaOfLength(5); assertFalse(indexName.contains(notInRepoData)); assertEquals(new IndexId(notInRepoData, notInRepoData), repositoryData.resolveIndexId(notInRepoData)); } @@ -149,7 +149,7 @@ public class RepositoryDataTests extends ESTestCase { final int numSnapshots = randomIntBetween(1, 30); final List snapshotIds = new ArrayList<>(origSnapshotIds); for (int i = 0; i < numSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + snapshotIds.add(new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())); } return snapshotIds; } @@ -159,7 +159,7 @@ public class RepositoryDataTests extends ESTestCase { final int numIndices = randomIntBetween(1, 30); final Map> 
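`testEqualsAndHashCode` above walks the usual contract: same name and id means equal objects and equal hash codes, and changing either field breaks equality. A compact stand-in value class showing the contract under test:

```java
import java.util.Objects;

public class EqualsHashCodeSketch {
    // Small stand-in for IndexId: equality over both the name and the id.
    static final class IndexId {
        final String name;
        final String id;
        IndexId(String name, String id) { this.name = name; this.id = id; }
        @Override public boolean equals(Object o) {
            if (this == o) return true;
            if (!(o instanceof IndexId)) return false;
            IndexId other = (IndexId) o;
            return name.equals(other.name) && id.equals(other.id);
        }
        @Override public int hashCode() { return Objects.hash(name, id); }
    }

    public static void main(String[] args) {
        IndexId a = new IndexId("idx", "uuid-1");
        IndexId b = new IndexId("idx", "uuid-1");
        System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
        System.out.println(a.equals(new IndexId("other", "uuid-1")));    // false: name differs
        System.out.println(a.equals(new IndexId("idx", "uuid-2")));      // false: id differs
    }
}
```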
indices = new HashMap<>(numIndices); for (int i = 0; i < numIndices; i++) { - final IndexId indexId = new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()); + final IndexId indexId = new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID()); final Set indexSnapshots = new LinkedHashSet<>(); final int numIndicesForSnapshot = randomIntBetween(1, numIndices); for (int j = 0; j < numIndicesForSnapshot; j++) { diff --git a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 6e538e721a4..4b57fa43cfe 100644 --- a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -179,7 +179,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { final int numSnapshots = randomIntBetween(1, 20); final List snapshotIds = new ArrayList<>(numSnapshots); for (int i = 0; i < numSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + snapshotIds.add(new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())); } RepositoryData repositoryData = new RepositoryData(readData.getGenId(), Collections.emptyList(), Collections.emptyMap(), snapshotIds); @@ -210,11 +210,11 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { private RepositoryData addRandomSnapshotsToRepoData(RepositoryData repoData, boolean inclIndices) { int numSnapshots = randomIntBetween(1, 20); for (int i = 0; i < numSnapshots; i++) { - SnapshotId snapshotId = new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()); + SnapshotId snapshotId = new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID()); int numIndices = inclIndices ? 
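The RepositoryData tests revolve around bookkeeping between snapshots and the indices they contain: adding a snapshot records, per index, which snapshot ids reference it. A loose sketch of that relationship, with plain maps instead of `RepositoryData` and invented names throughout:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class RepoDataSketch {
    public static void main(String[] args) {
        Map<String, Set<String>> indexToSnapshots = new LinkedHashMap<>();
        List<String> snapshots = new ArrayList<>();

        // add snapshot "snap-1" covering two indices
        snapshots.add("snap-1");
        for (String index : Arrays.asList("logs", "metrics")) {
            indexToSnapshots.computeIfAbsent(index, k -> new LinkedHashSet<>()).add("snap-1");
        }
        // add snapshot "snap-2" covering one already-known index
        snapshots.add("snap-2");
        indexToSnapshots.computeIfAbsent("logs", k -> new LinkedHashSet<>()).add("snap-2");

        System.out.println(snapshots);        // [snap-1, snap-2]
        System.out.println(indexToSnapshots); // {logs=[snap-1, snap-2], metrics=[snap-1]}
    }
}
```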
randomIntBetween(0, 20) : 0; List indexIds = new ArrayList<>(numIndices); for (int j = 0; j < numIndices; j++) { - indexIds.add(new IndexId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + indexIds.add(new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())); } repoData = repoData.addSnapshot(snapshotId, indexIds); } diff --git a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 80ee4dd6d0d..732614112bd 100644 --- a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -50,8 +50,8 @@ public class BaseRestHandlerTests extends ESTestCase { }; final HashMap params = new HashMap<>(); - params.put("consumed", randomAsciiOfLength(8)); - params.put("unconsumed", randomAsciiOfLength(8)); + params.put("consumed", randomAlphaOfLength(8)); + params.put("unconsumed", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = @@ -71,9 +71,9 @@ public class BaseRestHandlerTests extends ESTestCase { }; final HashMap params = new HashMap<>(); - params.put("consumed", randomAsciiOfLength(8)); - params.put("unconsumed-first", randomAsciiOfLength(8)); - params.put("unconsumed-second", randomAsciiOfLength(8)); + params.put("consumed", randomAlphaOfLength(8)); + params.put("unconsumed-first", randomAlphaOfLength(8)); + params.put("unconsumed-second", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = @@ -102,12 +102,12 @@ public class BaseRestHandlerTests extends ESTestCase { }; final HashMap params = new HashMap<>(); - params.put("consumed", randomAsciiOfLength(8)); - params.put("flied", randomAsciiOfLength(8)); - params.put("respones_param", randomAsciiOfLength(8)); - params.put("tokenzier", randomAsciiOfLength(8)); - params.put("very_close_to_parametre", randomAsciiOfLength(8)); - params.put("very_far_from_every_consumed_parameter", randomAsciiOfLength(8)); + params.put("consumed", randomAlphaOfLength(8)); + params.put("flied", randomAlphaOfLength(8)); + params.put("respones_param", randomAlphaOfLength(8)); + params.put("tokenzier", randomAlphaOfLength(8)); + params.put("very_close_to_parametre", randomAlphaOfLength(8)); + params.put("very_far_from_every_consumed_parameter", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = @@ -140,8 +140,8 @@ public class BaseRestHandlerTests extends ESTestCase { }; final HashMap params = new HashMap<>(); - params.put("consumed", randomAsciiOfLength(8)); - params.put("response_param", randomAsciiOfLength(8)); + params.put("consumed", randomAlphaOfLength(8)); + params.put("response_param", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mock(NodeClient.class)); @@ -158,8 +158,8 @@ public class BaseRestHandlerTests extends ESTestCase { }; final HashMap params = new HashMap<>(); - 
params.put("format", randomAsciiOfLength(8)); - params.put("filter_path", randomAsciiOfLength(8)); + params.put("format", randomAlphaOfLength(8)); + params.put("filter_path", randomAlphaOfLength(8)); params.put("pretty", randomFrom("true", "false", "", null)); params.put("human", null); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); @@ -188,14 +188,14 @@ public class BaseRestHandlerTests extends ESTestCase { }; final HashMap params = new HashMap<>(); - params.put("format", randomAsciiOfLength(8)); - params.put("h", randomAsciiOfLength(8)); - params.put("v", randomAsciiOfLength(8)); - params.put("ts", randomAsciiOfLength(8)); - params.put("pri", randomAsciiOfLength(8)); - params.put("bytes", randomAsciiOfLength(8)); - params.put("size", randomAsciiOfLength(8)); - params.put("time", randomAsciiOfLength(8)); + params.put("format", randomAlphaOfLength(8)); + params.put("h", randomAlphaOfLength(8)); + params.put("v", randomAlphaOfLength(8)); + params.put("ts", randomAlphaOfLength(8)); + params.put("pri", randomAlphaOfLength(8)); + params.put("bytes", randomAlphaOfLength(8)); + params.put("size", randomAlphaOfLength(8)); + params.put("time", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mock(NodeClient.class)); diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index 431bc152bd9..37bed74f793 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -302,8 +302,8 @@ public class BytesRestResponseTests extends ESTestCase { expected.addMetadata("es.metadata_0", "0"); } if (randomBoolean()) { - String resourceType = randomAsciiOfLength(5); - String resourceId = randomAsciiOfLength(5); + String resourceType = randomAlphaOfLength(5); + String resourceId = randomAlphaOfLength(5); originalException.setResources(resourceType, resourceId); expected.setResources(resourceType, resourceId); } diff --git a/core/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java b/core/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java index 3c31b5b7f4d..0a8ace71d88 100644 --- a/core/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java @@ -38,7 +38,7 @@ public class DeprecationRestHandlerTests extends ESTestCase { /** * Note: Headers should only use US ASCII (and this inevitably becomes one!). 
*/ - private final String deprecationMessage = randomAsciiOfLengthBetween(1, 30); + private final String deprecationMessage = randomAlphaOfLengthBetween(1, 30); private final DeprecationLogger deprecationLogger = mock(DeprecationLogger.class); public void testNullHandler() { diff --git a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 1e279a75218..102cfe5e3ac 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -130,9 +130,9 @@ public class RestControllerTests extends ESTestCase { RestController controller = mock(RestController.class); RestRequest.Method method = randomFrom(RestRequest.Method.values()); - String path = "/_" + randomAsciiOfLengthBetween(1, 6); + String path = "/_" + randomAlphaOfLengthBetween(1, 6); RestHandler handler = mock(RestHandler.class); - String deprecationMessage = randomAsciiOfLengthBetween(1, 10); + String deprecationMessage = randomAlphaOfLengthBetween(1, 10); DeprecationLogger logger = mock(DeprecationLogger.class); // don't want to test everything -- just that it actually wraps the handler @@ -147,10 +147,10 @@ public class RestControllerTests extends ESTestCase { final RestController controller = mock(RestController.class); final RestRequest.Method method = randomFrom(RestRequest.Method.values()); - final String path = "/_" + randomAsciiOfLengthBetween(1, 6); + final String path = "/_" + randomAlphaOfLengthBetween(1, 6); final RestHandler handler = mock(RestHandler.class); final RestRequest.Method deprecatedMethod = randomFrom(RestRequest.Method.values()); - final String deprecatedPath = "/_" + randomAsciiOfLengthBetween(1, 6); + final String deprecatedPath = "/_" + randomAlphaOfLengthBetween(1, 6); final DeprecationLogger logger = mock(DeprecationLogger.class); final String deprecationMessage = "[" + deprecatedMethod.name() + " " + deprecatedPath + "] is deprecated! 
Use [" + @@ -206,7 +206,7 @@ public class RestControllerTests extends ESTestCase { public void testDispatchRequestAddsAndFreesBytesOnSuccess() { int contentLength = BREAKER_LIMIT.bytesAsInt(); - String content = randomAsciiOfLength(contentLength); + String content = randomAlphaOfLength(contentLength); TestRestRequest request = new TestRestRequest("/", content, XContentType.JSON); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); @@ -218,7 +218,7 @@ public class RestControllerTests extends ESTestCase { public void testDispatchRequestAddsAndFreesBytesOnError() { int contentLength = BREAKER_LIMIT.bytesAsInt(); - String content = randomAsciiOfLength(contentLength); + String content = randomAlphaOfLength(contentLength); TestRestRequest request = new TestRestRequest("/error", content, XContentType.JSON); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); @@ -230,7 +230,7 @@ public class RestControllerTests extends ESTestCase { public void testDispatchRequestAddsAndFreesBytesOnlyOnceOnError() { int contentLength = BREAKER_LIMIT.bytesAsInt(); - String content = randomAsciiOfLength(contentLength); + String content = randomAlphaOfLength(contentLength); // we will produce an error in the rest handler and one more when sending the error response TestRestRequest request = new TestRestRequest("/error", content, XContentType.JSON); ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, true); @@ -243,7 +243,7 @@ public class RestControllerTests extends ESTestCase { public void testDispatchRequestLimitsBytes() { int contentLength = BREAKER_LIMIT.bytesAsInt() + 1; - String content = randomAsciiOfLength(contentLength); + String content = randomAlphaOfLength(contentLength); TestRestRequest request = new TestRestRequest("/", content, XContentType.JSON); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.SERVICE_UNAVAILABLE); @@ -254,7 +254,7 @@ public class RestControllerTests extends ESTestCase { } public void testDispatchRequiresContentTypeForRequestsWithContent() { - String content = randomAsciiOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); + String content = randomAlphaOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); TestRestRequest request = new TestRestRequest("/", content, null); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); restController = new RestController( @@ -279,7 +279,7 @@ public class RestControllerTests extends ESTestCase { } public void testDispatchFailsWithPlainText() { - String content = randomAsciiOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); + String content = randomAlphaOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) .withContent(new BytesArray(content), null).withPath("/foo") .withHeaders(Collections.singletonMap("Content-Type", Collections.singletonList("text/plain"))).build(); @@ -309,7 +309,7 @@ public class RestControllerTests extends ESTestCase { public void testDispatchWorksWithNewlineDelimitedJson() { final String mimeType = "application/x-ndjson"; - String content = randomAsciiOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); + String content = randomAlphaOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) .withContent(new BytesArray(content), null).withPath("/foo") .withHeaders(Collections.singletonMap("Content-Type", 
Collections.singletonList(mimeType))).build(); @@ -333,7 +333,7 @@ public class RestControllerTests extends ESTestCase { public void testDispatchWithContentStream() { final String mimeType = randomFrom("application/json", "application/smile"); - String content = randomAsciiOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); + String content = randomAlphaOfLengthBetween(1, BREAKER_LIMIT.bytesAsInt()); FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) .withContent(new BytesArray(content), null).withPath("/foo") .withHeaders(Collections.singletonMap("Content-Type", Collections.singletonList(mimeType))).build(); diff --git a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java index fa5b25e4549..14eb413de08 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java @@ -113,7 +113,7 @@ public class RestRequestTests extends ESTestCase { } public void testPlainTextSupport() { - ContentRestRequest restRequest = new ContentRestRequest(randomAsciiOfLengthBetween(1, 30), Collections.emptyMap(), + ContentRestRequest restRequest = new ContentRestRequest(randomAlphaOfLengthBetween(1, 30), Collections.emptyMap(), Collections.singletonMap("Content-Type", Collections.singletonList(randomFrom("text/plain", "text/plain; charset=utf-8", "text/plain;charset=utf-8")))); assertNull(restRequest.getXContentType()); @@ -132,7 +132,7 @@ public class RestRequestTests extends ESTestCase { } public void testMultipleContentTypeHeaders() { - List headers = new ArrayList<>(randomUnique(() -> randomAsciiOfLengthBetween(1, 16), randomIntBetween(2, 10))); + List headers = new ArrayList<>(randomUnique(() -> randomAlphaOfLengthBetween(1, 16), randomIntBetween(2, 10))); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new ContentRestRequest("", Collections.emptyMap(), Collections.singletonMap("Content-Type", headers))); assertEquals("only one Content-Type header should be provided", e.getMessage()); diff --git a/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java index 8a78cce8259..13e6de063e8 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/RestMainActionTests.java @@ -43,7 +43,7 @@ public class RestMainActionTests extends ESTestCase { public void testHeadResponse() throws Exception { final String nodeName = "node1"; final ClusterName clusterName = new ClusterName("cluster1"); - final String clusterUUID = randomAsciiOfLengthBetween(10, 20); + final String clusterUUID = randomAlphaOfLengthBetween(10, 20); final boolean available = randomBoolean(); final RestStatus expectedStatus = available ? RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE; final Version version = Version.CURRENT; @@ -69,7 +69,7 @@ public class RestMainActionTests extends ESTestCase { public void testGetResponse() throws Exception { final String nodeName = "node1"; final ClusterName clusterName = new ClusterName("cluster1"); - final String clusterUUID = randomAsciiOfLengthBetween(10, 20); + final String clusterUUID = randomAlphaOfLengthBetween(10, 20); final boolean available = randomBoolean(); final RestStatus expectedStatus = available ? 
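`testMultipleContentTypeHeaders` above pins the rule that a request may carry at most one Content-Type value, failing with "only one Content-Type header should be provided". The check reduces to something like this (message taken from the assertion, surrounding plumbing invented):

```java
import java.util.Arrays;
import java.util.List;

public class ContentTypeHeaderSketch {
    // Rejects requests carrying more than one Content-Type value.
    static String requireSingleContentType(List<String> values) {
        if (values.size() > 1) {
            throw new IllegalArgumentException("only one Content-Type header should be provided");
        }
        return values.isEmpty() ? null : values.get(0);
    }

    public static void main(String[] args) {
        System.out.println(requireSingleContentType(Arrays.asList("application/json")));
        try {
            requireSingleContentType(Arrays.asList("application/json", "text/plain"));
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```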
RestStatus.OK : RestStatus.SERVICE_UNAVAILABLE; final Version version = Version.CURRENT; diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java index ba478331ca0..7ece6934a21 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; @@ -49,7 +48,7 @@ public class RestNodesStatsActionTests extends ESTestCase { public void testUnrecognizedMetric() throws IOException { final HashMap params = new HashMap<>(); - final String metric = randomAsciiOfLength(64); + final String metric = randomAlphaOfLength(64); params.put("metric", metric); final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( @@ -86,7 +85,7 @@ public class RestNodesStatsActionTests extends ESTestCase { public void testUnrecognizedIndexMetric() { final HashMap params = new HashMap<>(); params.put("metric", "indices"); - final String indexMetric = randomAsciiOfLength(64); + final String indexMetric = randomAlphaOfLength(64); params.put("index_metric", indexMetric); final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_nodes/stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java index 0aa6e497836..12fcdaa2f55 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java @@ -46,7 +46,7 @@ public class RestIndicesStatsActionTests extends ESTestCase { public void testUnrecognizedMetric() throws IOException { final HashMap params = new HashMap<>(); - final String metric = randomAsciiOfLength(64); + final String metric = randomAlphaOfLength(64); params.put("metric", metric); final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_stats").withParams(params).build(); final IllegalArgumentException e = expectThrows( diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java index ae66664b456..7ee15adae33 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -81,7 +81,7 @@ public class RestIndicesActionTests extends ESTestCase { final int numIndices = randomIntBetween(0, 5); Index[] indices = new Index[numIndices]; for (int i = 0; i < numIndices; i++) { - indices[i] = new Index(randomAsciiOfLength(5), UUIDs.randomBase64UUID()); + indices[i] = new Index(randomAlphaOfLength(5), 
UUIDs.randomBase64UUID()); } final MetaData.Builder metaDataBuilder = MetaData.builder(); diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index 232d5ec3d32..6fce7219d67 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -69,11 +69,11 @@ public class RestRecoveryActionTests extends ESTestCase { when(state.getStage()).thenReturn(randomFrom(RecoveryState.Stage.values())); final DiscoveryNode sourceNode = randomBoolean() ? mock(DiscoveryNode.class) : null; if (sourceNode != null) { - when(sourceNode.getHostName()).thenReturn(randomAsciiOfLength(8)); + when(sourceNode.getHostName()).thenReturn(randomAlphaOfLength(8)); } when(state.getSourceNode()).thenReturn(sourceNode); final DiscoveryNode targetNode = mock(DiscoveryNode.class); - when(targetNode.getHostName()).thenReturn(randomAsciiOfLength(8)); + when(targetNode.getHostName()).thenReturn(randomAlphaOfLength(8)); when(state.getTargetNode()).thenReturn(targetNode); RecoveryState.Index index = mock(RecoveryState.Index.class); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java b/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java index 5a02ea1e96a..fb1e0885a5f 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.script; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -103,10 +102,10 @@ public class ScriptMetaDataTests extends AbstractSerializingTestCase contexts = new HashMap<>(); for (int i = 0; i < randomInt; i++) { - String plugin = randomAsciiOfLength(randomIntBetween(1, 10)); - String operation = randomAsciiOfLength(randomIntBetween(1, 30)); + String plugin = randomAlphaOfLength(randomIntBetween(1, 10)); + String operation = randomAlphaOfLength(randomIntBetween(1, 30)); String context = plugin + "-" + operation; contexts.put(context, new ScriptContext.Plugin(plugin, operation)); } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 3482dc8bb34..da205a9292e 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -95,11 +95,11 @@ public class ScriptServiceTests extends ESTestCase { for (int i = 0; i < randomInt; i++) { String plugin; do { - plugin = randomAsciiOfLength(randomIntBetween(1, 10)); + plugin = randomAlphaOfLength(randomIntBetween(1, 10)); } while (ScriptContextRegistry.RESERVED_SCRIPT_CONTEXTS.contains(plugin)); String operation; do { - operation = randomAsciiOfLength(randomIntBetween(1, 30)); + operation = randomAlphaOfLength(randomIntBetween(1, 30)); } while (ScriptContextRegistry.RESERVED_SCRIPT_CONTEXTS.contains(operation)); String context = plugin + "_" + operation; contexts.put(context, new ScriptContext.Plugin(plugin, operation)); @@ -324,8 
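The do/while loops in the ScriptServiceTests setup keep redrawing random names until they fall outside RESERVED_SCRIPT_CONTEXTS, the same idea as the `randomValueOtherThan` helper used elsewhere in this diff. A generic sketch of that retry loop:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.function.Supplier;

public class RandomOtherThanSketch {
    // Keep drawing until the value falls outside the reserved set, the loop the
    // ScriptServiceTests setup writes out by hand.
    static <T> T randomValueOtherThanMany(Set<T> reserved, Supplier<T> supplier) {
        T value;
        do {
            value = supplier.get();
        } while (reserved.contains(value));
        return value;
    }

    public static void main(String[] args) {
        Random random = new Random(7L);
        Set<String> reserved = new HashSet<>(Arrays.asList("search", "update"));
        String[] pool = {"search", "update", "ingest", "aggs"};
        String context = randomValueOtherThanMany(reserved, () -> pool[random.nextInt(pool.length)]);
        System.out.println(context); // never "search" or "update"
    }
}
```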
+324,8 @@ public class ScriptServiceTests extends ESTestCase { String pluginName; String unknownContext; do { - pluginName = randomAsciiOfLength(randomIntBetween(1, 10)); - unknownContext = randomAsciiOfLength(randomIntBetween(1, 30)); + pluginName = randomAlphaOfLength(randomIntBetween(1, 10)); + unknownContext = randomAlphaOfLength(randomIntBetween(1, 30)); } while(scriptContextRegistry.isSupportedContext(new ScriptContext.Plugin(pluginName, unknownContext))); String type = scriptEngineService.getType(); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptTests.java b/core/src/test/java/org/elasticsearch/script/ScriptTests.java index 70c5af00f89..9584bf01a5c 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptTests.java @@ -67,12 +67,12 @@ public class ScriptTests extends ESTestCase { if (scriptType == ScriptType.INLINE) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); - builder.field("field", randomAsciiOfLengthBetween(1, 5)); + builder.field("field", randomAlphaOfLengthBetween(1, 5)); builder.endObject(); script = builder.string(); } } else { - script = randomAsciiOfLengthBetween(1, 5); + script = randomAlphaOfLengthBetween(1, 5); } return new Script( scriptType, diff --git a/core/src/test/java/org/elasticsearch/script/StoredScriptTests.java b/core/src/test/java/org/elasticsearch/script/StoredScriptTests.java index 5f000f28cbc..96a9a417bbe 100644 --- a/core/src/test/java/org/elasticsearch/script/StoredScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/StoredScriptTests.java @@ -343,8 +343,8 @@ public class StoredScriptTests extends AbstractSerializingTestCase client().admin().cluster().preparePutStoredScript() .setLang(LANG) .setId("foobar") - .setContent(new BytesArray(randomAsciiOfLength(SCRIPT_MAX_SIZE_IN_BYTES + 1)), XContentType.JSON) + .setContent(new BytesArray(randomAlphaOfLength(SCRIPT_MAX_SIZE_IN_BYTES + 1)), XContentType.JSON) .get() ); assertEquals("exceeded max allowed stored script size in bytes [64] with size [65] for script [foobar]", e.getMessage()); diff --git a/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java b/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java index ba4c794a090..5fe9d9d75fb 100644 --- a/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java @@ -83,7 +83,7 @@ public abstract class AbstractSearchTestCase extends ESTestCase { } List searchExtBuilders = new ArrayList<>(); for (String elementName : elementNames) { - searchExtBuilders.add(searchExtPlugin.getSupportedElements().get(elementName).apply(randomAsciiOfLengthBetween(3, 10))); + searchExtBuilders.add(searchExtPlugin.getSupportedElements().get(elementName).apply(randomAlphaOfLengthBetween(3, 10))); } return searchExtBuilders; }; diff --git a/core/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/core/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index b062ace0f23..3e5943951ae 100644 --- a/core/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/core/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -40,7 +40,7 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashC public class NestedIdentityTests extends ESTestCase { public static NestedIdentity createTestItem(int depth) { - String field = frequently() ? 
randomAsciiOfLengthBetween(1, 20) : randomRealisticUnicodeOfCodepointLengthBetween(1, 20); + String field = frequently() ? randomAlphaOfLengthBetween(1, 20) : randomRealisticUnicodeOfCodepointLengthBetween(1, 20); int offset = randomInt(10); NestedIdentity child = null; if (depth > 0) { diff --git a/core/src/test/java/org/elasticsearch/search/SearchHitTests.java b/core/src/test/java/org/elasticsearch/search/SearchHitTests.java index 4c7a6f10e76..84d40f0b9b3 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -61,8 +61,8 @@ public class SearchHitTests extends ESTestCase { public static SearchHit createTestItem(boolean withOptionalInnerHits) { int internalId = randomInt(); - String uid = randomAsciiOfLength(10); - Text type = new Text(randomAsciiOfLengthBetween(5, 10)); + String uid = randomAlphaOfLength(10); + Text type = new Text(randomAlphaOfLengthBetween(5, 10)); NestedIdentity nestedIdentity = null; if (randomBoolean()) { nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2)); @@ -77,7 +77,7 @@ public class SearchHitTests extends ESTestCase { String metaField = randomFrom(META_FIELDS); fields.put(metaField, new SearchHitField(metaField, values.v1())); } else { - String fieldName = randomAsciiOfLengthBetween(5, 10); + String fieldName = randomAlphaOfLengthBetween(5, 10); fields.put(fieldName, new SearchHitField(fieldName, values.v1())); } } @@ -103,7 +103,7 @@ public class SearchHitTests extends ESTestCase { int size = randomIntBetween(0, 5); Map highlightFields = new HashMap<>(size); for (int i = 0; i < size; i++) { - highlightFields.put(randomAsciiOfLength(5), HighlightFieldTests.createTestItem()); + highlightFields.put(randomAlphaOfLength(5), HighlightFieldTests.createTestItem()); } hit.highlightFields(highlightFields); } @@ -111,7 +111,7 @@ public class SearchHitTests extends ESTestCase { int size = randomIntBetween(0, 5); String[] matchedQueries = new String[size]; for (int i = 0; i < size; i++) { - matchedQueries[i] = randomAsciiOfLength(5); + matchedQueries[i] = randomAlphaOfLength(5); } hit.matchedQueries(matchedQueries); } @@ -122,13 +122,13 @@ public class SearchHitTests extends ESTestCase { int innerHitsSize = randomIntBetween(0, 3); Map innerHits = new HashMap<>(innerHitsSize); for (int i = 0; i < innerHitsSize; i++) { - innerHits.put(randomAsciiOfLength(5), SearchHitsTests.createTestItem()); + innerHits.put(randomAlphaOfLength(5), SearchHitsTests.createTestItem()); } hit.setInnerHits(innerHits); } if (randomBoolean()) { - hit.shard(new SearchShardTarget(randomAsciiOfLengthBetween(5, 10), - new ShardId(new Index(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)), randomInt()))); + hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10), + new ShardId(new Index(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)), randomInt()))); } return hit; } @@ -149,6 +149,24 @@ public class SearchHitTests extends ESTestCase { assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); } + /** + * When e.g. with "stored_fields": "_none_", only "_index" and "_score" are returned. 
+ */ + public void testFromXContentWithoutTypeAndId() throws IOException { + String hit = "{\"_index\": \"my_index\", \"_score\": 1}"; + SearchHit parsed; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, hit)) { + parser.nextToken(); // jump to first START_OBJECT + parsed = SearchHit.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + assertEquals("my_index", parsed.getIndex()); + assertEquals(1, parsed.getScore(), Float.MIN_VALUE); + assertNull(parsed.getType()); + assertNull(parsed.getId()); + } + public void testToXContent() throws IOException { SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()); searchHit.score(1.5f); @@ -216,7 +234,7 @@ public class SearchHitTests extends ESTestCase { } private static Explanation createExplanation(int depth) { - String description = randomAsciiOfLengthBetween(5, 20); + String description = randomAlphaOfLengthBetween(5, 20); float value = randomFloat(); List details = new ArrayList<>(); if (depth > 0) { diff --git a/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java b/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java index 9e58bf26744..47fc61fb6a6 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java @@ -87,12 +87,12 @@ public class SearchRequestTests extends AbstractSearchTestCase { private SearchRequest mutate(SearchRequest searchRequest) throws IOException { SearchRequest mutation = copyRequest(searchRequest); List mutators = new ArrayList<>(); - mutators.add(() -> mutation.indices(ArrayUtils.concat(searchRequest.indices(), new String[] { randomAsciiOfLength(10) }))); + mutators.add(() -> mutation.indices(ArrayUtils.concat(searchRequest.indices(), new String[] { randomAlphaOfLength(10) }))); mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(searchRequest.indicesOptions(), () -> IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())))); - mutators.add(() -> mutation.types(ArrayUtils.concat(searchRequest.types(), new String[] { randomAsciiOfLength(10) }))); - mutators.add(() -> mutation.preference(randomValueOtherThan(searchRequest.preference(), () -> randomAsciiOfLengthBetween(3, 10)))); - mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAsciiOfLengthBetween(3, 10)))); + mutators.add(() -> mutation.types(ArrayUtils.concat(searchRequest.types(), new String[] { randomAlphaOfLength(10) }))); + mutators.add(() -> mutation.preference(randomValueOtherThan(searchRequest.preference(), () -> randomAlphaOfLengthBetween(3, 10)))); + mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAlphaOfLengthBetween(3, 10)))); mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), () -> randomBoolean())))); mutators.add(() -> mutation .scroll(randomValueOtherThan(searchRequest.scroll(), () -> new Scroll(new TimeValue(randomNonNegativeLong() % 100000))))); diff --git a/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 0f626903609..f3ff6be1cc1 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -48,7 +48,6 @@ import 
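The new `testFromXContentWithoutTypeAndId` documents that a hit parsed from only `_index` and `_score` keeps null for `_type` and `_id`. Sketching that leniency with a `Map` in place of the real XContentParser stream:

```java
import java.util.HashMap;
import java.util.Map;

public class LenientHitParseSketch {
    static final class Hit {
        String index, type, id;
        float score;
    }

    static Hit fromMap(Map<String, Object> source) {
        Hit hit = new Hit();
        hit.index = (String) source.get("_index");
        hit.type = (String) source.get("_type"); // stays null when absent
        hit.id = (String) source.get("_id");     // stays null when absent
        Object score = source.get("_score");
        hit.score = score == null ? Float.NaN : ((Number) score).floatValue();
        return hit;
    }

    public static void main(String[] args) {
        Map<String, Object> json = new HashMap<>();
        json.put("_index", "my_index");
        json.put("_score", 1);
        Hit hit = fromMap(json);
        System.out.println(hit.index + " " + hit.score + " " + hit.type + " " + hit.id);
        // -> my_index 1.0 null null, mirroring the assertNull checks above
    }
}
```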
org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchLocalRequest; -import org.elasticsearch.search.query.QuerySearchResultProvider; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; @@ -184,13 +183,13 @@ public class SearchServiceTests extends ESSingleNodeTestCase { final int rounds = scaledRandomIntBetween(100, 10000); for (int i = 0; i < rounds; i++) { try { - QuerySearchResultProvider querySearchResultProvider = service.executeQueryPhase( + SearchPhaseResult searchPhaseResult = service.executeQueryPhase( new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT, new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f), new SearchTask(123L, "", "", "", null)); IntArrayList intCursors = new IntArrayList(1); intCursors.add(0); - ShardFetchRequest req = new ShardFetchRequest(querySearchResultProvider.id(), intCursors, null /* not a scroll */); + ShardFetchRequest req = new ShardFetchRequest(searchPhaseResult.getRequestId(), intCursors, null /* not a scroll */); service.executeFetchPhase(req, new SearchTask(123L, "", "", "", null)); } catch (AlreadyClosedException ex) { throw ex; diff --git a/core/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java b/core/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index 96733a2ac6d..654b3c98615 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -52,7 +52,7 @@ public class SearchSortValuesTests extends ESTestCase { valueSuppliers.add(() -> randomByte()); valueSuppliers.add(() -> randomShort()); valueSuppliers.add(() -> randomBoolean()); - valueSuppliers.add(() -> frequently() ? randomAsciiOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30)); + valueSuppliers.add(() -> frequently() ? 
randomAlphaOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30)); int size = randomIntBetween(1, 20); Object[] values = new Object[size]; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 2aba0ed59ee..180e639eb34 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -58,7 +58,7 @@ public class AggregatorFactoriesTests extends ESTestCase { // stick around for all of the subclasses currentTypes = new String[randomIntBetween(0, 5)]; for (int i = 0; i < currentTypes.length; i++) { - String type = randomAsciiOfLengthBetween(1, 10); + String type = randomAlphaOfLengthBetween(1, 10); currentTypes[i] = type; } xContentRegistry = new NamedXContentRegistry(new SearchModule(settings, false, emptyList()).getNamedXContents()); @@ -174,7 +174,7 @@ public class AggregatorFactoriesTests extends ESTestCase { public void testSameAggregationName() throws Exception { assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled", XContent.isStrictDuplicateDetectionEnabled()); - final String name = randomAsciiOfLengthBetween(1, 10); + final String name = randomAlphaOfLengthBetween(1, 10); XContentBuilder source = JsonXContent.contentBuilder() .startObject() .startObject(name) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 363e972456e..a0f7bbfcba4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -238,13 +238,16 @@ public abstract class AggregatorTestCase extends ESTestCase { if (aggs.isEmpty()) { return null; } else { - if (randomBoolean()) { + if (randomBoolean() && aggs.size() > 1) { // sometimes do an incremental reduce - List internalAggregations = randomSubsetOf(randomIntBetween(1, aggs.size()), aggs); - A internalAgg = (A) aggs.get(0).doReduce(internalAggregations, + int toReduceSize = aggs.size(); + Collections.shuffle(aggs, random()); + int r = randomIntBetween(1, toReduceSize); + List toReduce = aggs.subList(0, r); + A reduced = (A) aggs.get(0).doReduce(toReduce, new InternalAggregation.ReduceContext(root.context().bigArrays(), null, false)); - aggs.removeAll(internalAggregations); - aggs.add(internalAgg); + aggs = new ArrayList<>(aggs.subList(r, toReduceSize)); + aggs.add(reduced); } // now do the final reduce @SuppressWarnings("unchecked") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 162b7bcd1b6..651c261aa81 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -85,7 +85,7 @@ public abstract class BaseAggregationTestCase return createTestInstance(name, pipelineAggregators, metaData); } - public final void testReduceRandom() { - String name = randomAsciiOfLength(5); + public void testReduceRandom() { + String name = randomAlphaOfLength(5); List inputs = new ArrayList<>(); List toReduce = new ArrayList<>(); int 
toReduceSize = between(1, 200); @@ -63,15 +63,15 @@ public abstract class InternalAggregationTestCase ScriptService mockScriptService = mockScriptService(); MockBigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); if (randomBoolean() && toReduce.size() > 1) { + // sometimes do an incremental reduce Collections.shuffle(toReduce, random()); - // we leave at least one element in the list - int r = Math.max(1, randomIntBetween(0, toReduceSize - 2)); + int r = randomIntBetween(1, toReduceSize); List<InternalAggregation> internalAggregations = toReduce.subList(0, r); InternalAggregation.ReduceContext context = new InternalAggregation.ReduceContext(bigArrays, mockScriptService, false); @SuppressWarnings("unchecked") T reduced = (T) inputs.get(0).reduce(internalAggregations, context); - toReduce = toReduce.subList(r, toReduceSize); + toReduce = new ArrayList<>(toReduce.subList(r, toReduceSize)); toReduce.add(reduced); } InternalAggregation.ReduceContext context = @@ -92,7 +92,7 @@ public abstract class InternalAggregationTestCase @Override protected final T createTestInstance() { - return createTestInstance(randomAsciiOfLength(5)); + return createTestInstance(randomAlphaOfLength(5)); } private T createTestInstance(String name) { @@ -101,7 +101,7 @@ public abstract class InternalAggregationTestCase Map<String, Object> metaData = new HashMap<>(); int metaDataCount = randomBoolean() ? 0 : between(1, 10); while (metaData.size() < metaDataCount) { - metaData.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + metaData.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); } return createTestInstance(name, pipelineAggregators, metaData); } @@ -113,7 +113,7 @@ public abstract class InternalAggregationTestCase Map<String, Object> metaData = new HashMap<>(); int metaDataCount = randomBoolean() ? 0 : between(1, 10); while (metaData.size() < metaDataCount) { - metaData.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + metaData.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); } return createUnmappedInstance(name, pipelineAggregators, metaData); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java index c7844f29d05..4098e85c62e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenTests.java @@ -26,8 +26,8 @@ public class ChildrenTests extends BaseAggregationTestCase for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { - key = randomAsciiOfLengthBetween(1, 20); + key = randomAlphaOfLengthBetween(1, 20); } double from = randomBoolean() ? Double.NEGATIVE_INFINITY : randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE - 1000); double to = randomBoolean() ?
Double.POSITIVE_INFINITY diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java index 29c8d74f984..f545ae500a2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java @@ -51,9 +51,9 @@ public class SignificantTermsTests extends BaseAggregationTestCase includeValues = new TreeSet<>(); int numIncs = randomIntBetween(1, 20); for (int i = 0; i < numIncs; i++) { - includeValues.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + includeValues.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } SortedSet excludeValues = null; incExc = new IncludeExclude(includeValues, excludeValues); @@ -141,7 +141,7 @@ public class SignificantTermsTests extends BaseAggregationTestCase excludeValues2 = new TreeSet<>(); int numExcs2 = randomIntBetween(1, 20); for (int i = 0; i < numExcs2; i++) { - excludeValues2.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + excludeValues2.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } incExc = new IncludeExclude(includeValues2, excludeValues2); break; @@ -149,12 +149,12 @@ public class SignificantTermsTests extends BaseAggregationTestCase includeValues3 = new TreeSet<>(); int numIncs3 = randomIntBetween(1, 20); for (int i = 0; i < numIncs3; i++) { - includeValues3.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + includeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } SortedSet excludeValues3 = new TreeSet<>(); int numExcs3 = randomIntBetween(1, 20); for (int i = 0; i < numExcs3; i++) { - excludeValues3.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + excludeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } incExc = new IncludeExclude(includeValues3, excludeValues3); break; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java index 46af395c476..b6ff885638f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -1095,10 +1093,10 @@ public class StringTermsIT extends AbstractTermsTestCase { public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsSpecialChars() throws Exception { StringBuilder filter2NameBuilder = new StringBuilder("filt.er2"); - filter2NameBuilder.append(randomAsciiOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); + filter2NameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String filter2Name = filter2NameBuilder.toString(); StringBuilder statsNameBuilder = new StringBuilder("st.ats"); - statsNameBuilder.append(randomAsciiOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); + statsNameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String statsName = 
statsNameBuilder.toString(); boolean asc = randomBoolean(); SearchResponse response = client() @@ -1158,10 +1156,10 @@ public class StringTermsIT extends AbstractTermsTestCase { public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsSpecialCharsNoDotNotation() throws Exception { StringBuilder filter2NameBuilder = new StringBuilder("filt.er2"); - filter2NameBuilder.append(randomAsciiOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); + filter2NameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String filter2Name = filter2NameBuilder.toString(); StringBuilder statsNameBuilder = new StringBuilder("st.ats"); - statsNameBuilder.append(randomAsciiOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); + statsNameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String statsName = statsNameBuilder.toString(); boolean asc = randomBoolean(); SearchResponse response = client() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java index 2241c0a2f9b..d0a38d0890a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java @@ -47,9 +47,9 @@ public class TermsTests extends BaseAggregationTestCase @Override protected TermsAggregationBuilder createTestAggregatorBuilder() { - String name = randomAsciiOfLengthBetween(3, 20); + String name = randomAlphaOfLengthBetween(3, 20); TermsAggregationBuilder factory = new TermsAggregationBuilder(name, null); - String field = randomAsciiOfLengthBetween(3, 20); + String field = randomAlphaOfLengthBetween(3, 20); int randomFieldBranch = randomInt(2); switch (randomFieldBranch) { case 0: @@ -131,7 +131,7 @@ public class TermsTests extends BaseAggregationTestCase SortedSet includeValues = new TreeSet<>(); int numIncs = randomIntBetween(1, 20); for (int i = 0; i < numIncs; i++) { - includeValues.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + includeValues.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } SortedSet excludeValues = null; incExc = new IncludeExclude(includeValues, excludeValues); @@ -141,7 +141,7 @@ public class TermsTests extends BaseAggregationTestCase SortedSet excludeValues2 = new TreeSet<>(); int numExcs2 = randomIntBetween(1, 20); for (int i = 0; i < numExcs2; i++) { - excludeValues2.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + excludeValues2.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } incExc = new IncludeExclude(includeValues2, excludeValues2); break; @@ -149,12 +149,12 @@ public class TermsTests extends BaseAggregationTestCase SortedSet includeValues3 = new TreeSet<>(); int numIncs3 = randomIntBetween(1, 20); for (int i = 0; i < numIncs3; i++) { - includeValues3.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + includeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } SortedSet excludeValues3 = new TreeSet<>(); int numExcs3 = randomIntBetween(1, 20); for (int i = 0; i < numExcs3; i++) { - excludeValues3.add(new BytesRef(randomAsciiOfLengthBetween(1, 30))); + excludeValues3.add(new BytesRef(randomAlphaOfLengthBetween(1, 30))); } incExc = new IncludeExclude(includeValues3, excludeValues3); break; @@ -188,10 +188,10 @@ public class TermsTests extends BaseAggregationTestCase 
orders.add(Terms.Order.count(randomBoolean())); break; case 2: - orders.add(Terms.Order.aggregation(randomAsciiOfLengthBetween(3, 20), randomBoolean())); + orders.add(Terms.Order.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean())); break; case 3: - orders.add(Terms.Order.aggregation(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20), randomBoolean())); + orders.add(Terms.Order.aggregation(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20), randomBoolean())); break; case 4: int numOrders = randomIntBetween(1, 3); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java new file mode 100644 index 00000000000..424c3aed210 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java @@ -0,0 +1,145 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.missing; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.ValueType; + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; + + +public class MissingAggregatorTests extends AggregatorTestCase { + public void testMatchNoDocs() throws IOException { + int numDocs = randomIntBetween(10, 200); + testBothCases(numDocs, + "field", + Queries.newMatchAllQuery(), + doc -> doc.add(new SortedNumericDocValuesField("field", randomLong())), + internalMissing -> assertEquals(internalMissing.getDocCount(), 0)); + } + + public void testMatchAllDocs() throws IOException { + int numDocs = randomIntBetween(10, 200); + testBothCases(numDocs, + "field", + Queries.newMatchAllQuery(), + doc -> doc.add(new SortedNumericDocValuesField("another_field", randomLong())), + internalMissing -> assertEquals(internalMissing.getDocCount(), numDocs)); + } + + public void testMatchSparse() throws IOException { + int numDocs = randomIntBetween(100, 200); + final AtomicInteger count = new AtomicInteger(); + testBothCases(numDocs, + "field", + 
Queries.newMatchAllQuery(), + doc -> { + if (randomBoolean()) { + doc.add(new SortedNumericDocValuesField("another_field", randomLong())); + count.incrementAndGet(); + } else { + doc.add(new SortedNumericDocValuesField("field", randomLong())); + } + }, + internalMissing -> { + assertEquals(internalMissing.getDocCount(), count.get()); + count.set(0); + }); + } + + public void testMissingField() throws IOException { + int numDocs = randomIntBetween(10, 20); + testBothCases(numDocs, + "unknown_field", + Queries.newMatchAllQuery(), + doc -> { + doc.add(new SortedNumericDocValuesField("field", randomLong())); + }, + internalMissing -> { + assertEquals(internalMissing.getDocCount(), numDocs); + }); + } + + private void testBothCases(int numDocs, + String fieldName, + Query query, + Consumer consumer, + Consumer verify) throws IOException { + executeTestCase(numDocs, fieldName, query, consumer, verify, false); + executeTestCase(numDocs, fieldName, query, consumer, verify, true); + + } + + private void executeTestCase(int numDocs, + String fieldName, + Query query, + Consumer consumer, + Consumer verify, + boolean reduced) throws IOException { + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + for (int i = 0; i < numDocs; i++) { + if (frequently()) { + indexWriter.commit(); + } + consumer.accept(document); + indexWriter.addDocument(document); + document.clear(); + } + } + + try (IndexReader indexReader = DirectoryReader.open(directory)) { + IndexSearcher indexSearcher = + newSearcher(indexReader, true, true); + MissingAggregationBuilder builder = + new MissingAggregationBuilder("_name", ValueType.LONG); + builder.field(fieldName); + + NumberFieldMapper.Builder mapperBuilder = new NumberFieldMapper.Builder("_name", + NumberFieldMapper.NumberType.LONG); + MappedFieldType fieldType = mapperBuilder.fieldType(); + fieldType.setHasDocValues(true); + fieldType.setName(builder.field()); + + InternalMissing missing; + if (reduced) { + missing = searchAndReduce(indexSearcher, query, builder, fieldType); + } else { + missing = search(indexSearcher, query, builder, fieldType); + } + verify.accept(missing); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java index 29dde100a08..ff2563fc4cc 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedTests.java @@ -25,7 +25,7 @@ public class NestedTests extends BaseAggregationTestCase[] ranges = new Tuple[numRanges]; for (int i = 0; i < numRanges; i++) { BytesRef[] values = new BytesRef[2]; - values[0] = new BytesRef(randomAsciiOfLength(15)); - values[1] = new BytesRef(randomAsciiOfLength(15)); + values[0] = new BytesRef(randomAlphaOfLength(15)); + values[1] = new BytesRef(randomAlphaOfLength(15)); Arrays.sort(values); ranges[i] = new Tuple(values[0], values[1]); } @@ -60,7 +60,7 @@ public class InternalBinaryRangeTests extends InternalAggregationTestCase buckets = new ArrayList<>(); for (int i = 0; i < RANGES.length; ++i) { final int docCount = randomIntBetween(1, 100); - buckets.add(new InternalBinaryRange.Bucket(format, keyed, randomAsciiOfLength(10), + buckets.add(new InternalBinaryRange.Bucket(format, keyed, randomAlphaOfLength(10), RANGES[i].v1(), RANGES[i].v2(), 
docCount, InternalAggregations.EMPTY)); } return new InternalBinaryRange(name, format, keyed, buckets, pipelineAggregators, Collections.emptyMap()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java index f957836b3e4..82cd21cdf38 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java @@ -62,7 +62,7 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas List buckets = new ArrayList<>(numBuckets); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { - BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAsciiOfLength(10))); + BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10))); int subsetDf = randomIntBetween(1, 10); int supersetDf = randomIntBetween(subsetDf, 20); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java index d20f546a541..64e814bd819 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java @@ -50,7 +50,7 @@ public class StringTermsTests extends InternalTermsTestCase { final int numBuckets = randomInt(shardSize); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { - BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAsciiOfLength(10))); + BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10))); int docCount = randomIntBetween(1, 100); buckets.add(new StringTerms.Bucket(term, docCount, InternalAggregations.EMPTY, showTermDocCountError, docCountError, format)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java index 05390b1282d..a12dbad59fb 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AdjacencyMatrixTests.java @@ -36,10 +36,10 @@ public class AdjacencyMatrixTests extends BaseAggregationTestCase filters = new HashMap<>(size); - for (String key : randomUnique(() -> randomAsciiOfLengthBetween(1, 20), size)) { - filters.put(key, QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20))); + for (String key : randomUnique(() -> randomAlphaOfLengthBetween(1, 20), size)) { + filters.put(key, QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); } - factory = new AdjacencyMatrixAggregationBuilder(randomAsciiOfLengthBetween(1, 20), filters) + factory = new AdjacencyMatrixAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters) .separator(randomFrom("&","+","\t")); return factory; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java new file mode 100644 index 00000000000..b80dd163fc9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.metrics; + +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.FieldValueQuery; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregator; +import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality; +import org.elasticsearch.search.aggregations.support.ValueType; + +import java.io.IOException; +import java.util.Arrays; +import java.util.function.Consumer; + +import static java.util.Collections.singleton; + +public class CardinalityAggregatorTests extends AggregatorTestCase { + public void testNoDocs() throws IOException { + testCase(new MatchAllDocsQuery(), iw -> { + // Intentionally not writing any docs + }, card -> { + assertEquals(0.0, card.getValue(), 0); + }); + } + + public void testNoMatchingField() throws IOException { + testCase(new MatchAllDocsQuery(), iw -> { + iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7))); + iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1))); + }, card -> { + assertEquals(0.0, card.getValue(), 0); + }); + } + + public void testSomeMatchesSortedNumericDocValues() throws IOException { + testCase(new FieldValueQuery("number"), iw -> { + iw.addDocument(singleton(new SortedNumericDocValuesField("number", 7))); + iw.addDocument(singleton(new SortedNumericDocValuesField("number", 1))); + }, card -> { + assertEquals(2, card.getValue(), 0); + }); + } + + public void testSomeMatchesNumericDocValues() throws IOException { + testCase(new FieldValueQuery("number"), iw -> { + iw.addDocument(singleton(new NumericDocValuesField("number", 7))); + 
iw.addDocument(singleton(new NumericDocValuesField("number", 1))); + }, card -> { + assertEquals(2, card.getValue(), 0); + }); + } + + public void testQueryFiltering() throws IOException { + testCase(IntPoint.newRangeQuery("number", 0, 5), iw -> { + iw.addDocument(Arrays.asList(new IntPoint("number", 7), + new SortedNumericDocValuesField("number", 7))); + iw.addDocument(Arrays.asList(new IntPoint("number", 1), + new SortedNumericDocValuesField("number", 1))); + }, card -> { + assertEquals(1, card.getValue(), 0); + }); + } + + public void testQueryFiltersAll() throws IOException { + testCase(IntPoint.newRangeQuery("number", -1, 0), iw -> { + iw.addDocument(Arrays.asList(new IntPoint("number", 7), + new SortedNumericDocValuesField("number", 7))); + iw.addDocument(Arrays.asList(new IntPoint("number", 1), + new SortedNumericDocValuesField("number", 1))); + }, card -> { + assertEquals(0.0, card.getValue(), 0); + }); + } + + private void testCase(Query query, CheckedConsumer buildIndex, + Consumer verify) throws IOException { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + buildIndex.accept(indexWriter); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder( + "_name", ValueType.NUMERIC).field("number"); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType( + NumberFieldMapper.NumberType.LONG); + fieldType.setName("number"); + try (CardinalityAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, + fieldType)) { + aggregator.preCollection(); + indexSearcher.search(query, aggregator); + aggregator.postCollection(); + verify.accept((InternalCardinality) aggregator.buildAggregation(0L)); + } + indexReader.close(); + directory.close(); + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java index 1b563d531a8..47b9ab07b10 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FilterTests.java @@ -27,8 +27,8 @@ public class FilterTests extends BaseAggregationTestCase randomAsciiOfLengthBetween(1, 20), size)) { + for (String key : randomUnique(() -> randomAlphaOfLengthBetween(1, 20), size)) { filters[i++] = new KeyedFilter(key, - QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20))); + QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); } - factory = new FiltersAggregationBuilder(randomAsciiOfLengthBetween(1, 20), filters); + factory = new FiltersAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters); } else { QueryBuilder[] filters = new QueryBuilder[size]; for (int i = 0; i < size; i++) { - filters[i] = QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20)); + filters[i] = QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20)); } - factory = new FiltersAggregationBuilder(randomAsciiOfLengthBetween(1, 20), filters); + factory = new FiltersAggregationBuilder(randomAlphaOfLengthBetween(1, 20), filters); } if (randomBoolean()) { factory.otherBucket(randomBoolean()); } if (randomBoolean()) { - 
factory.otherBucketKey(randomAsciiOfLengthBetween(1, 20)); + factory.otherBucketKey(randomAlphaOfLengthBetween(1, 20)); } return factory; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java index e086598dbfe..9f5bd13b5f6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java @@ -26,8 +26,8 @@ public class GeoBoundsTests extends BaseAggregationTestCase fields = new ArrayList<>(fieldsSize); for (int i = 0; i < fieldsSize; i++) { - fields.add(randomAsciiOfLengthBetween(5, 50)); + fields.add(randomAlphaOfLengthBetween(5, 50)); } factory.storedFields(fields); break; @@ -83,16 +83,16 @@ public class TopHitsTests extends BaseAggregationTestCase { counts += in.getCount(); } assertEquals(counts, reduced.getCount()); - assertEquals(sum, reduced.getSum(), 0.00000001); - assertEquals(sum / counts, reduced.value(), 0.00000001); + assertEquals(sum, reduced.getSum(), 0.0000001); + assertEquals(sum / counts, reduced.value(), 0.0000001); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinalityTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinalityTests.java new file mode 100644 index 00000000000..7c5809f323b --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinalityTests.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.metrics.cardinality; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.search.aggregations.InternalAggregationTestCase; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class InternalCardinalityTests extends InternalAggregationTestCase { + private static List algos; + private static int p; + + @Before + public void setup() { + algos = new ArrayList<>(); + p = randomIntBetween(HyperLogLogPlusPlus.MIN_PRECISION, HyperLogLogPlusPlus.MAX_PRECISION); + } + + @Override + protected InternalCardinality createTestInstance(String name, + List pipelineAggregators, Map metaData) { + HyperLogLogPlusPlus hllpp = new HyperLogLogPlusPlus(p, + new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()), 1); + algos.add(hllpp); + for (int i = 0; i < 100; i++) { + hllpp.collect(0, randomIntBetween(1, 100)); + } + return new InternalCardinality(name, hllpp, pipelineAggregators, metaData); + } + + @Override + protected Reader instanceReader() { + return InternalCardinality::new; + } + + @Override + protected void assertReduced(InternalCardinality reduced, List inputs) { + HyperLogLogPlusPlus[] algos = inputs.stream().map(InternalCardinality::getState) + .toArray(size -> new HyperLogLogPlusPlus[size]); + if (algos.length > 0) { + HyperLogLogPlusPlus result = algos[0]; + for (int i = 1; i < algos.length; i++) { + result.merge(0, algos[i], 0); + } + assertEquals(result.cardinality(0), reduced.value(), 0); + } + } + + @After + public void cleanup() { + Releasables.close(algos); + algos.clear(); + algos = null; + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java index b82e822f6b0..be06bdd56b4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java @@ -50,13 +50,13 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase metaData) { Map params = new HashMap<>(); if (randomBoolean()) { - params.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + params.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); } Script reduceScript = null; if (hasReduceScript) { reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME, params); } - return new InternalScriptedMetric(name, randomAsciiOfLength(5), reduceScript, pipelineAggregators, metaData); + return new InternalScriptedMetric(name, randomAlphaOfLength(5), reduceScript, pipelineAggregators, metaData); } /** diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java index ba729d3c978..8265933bfb6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java @@ -122,9 +122,9 @@ public class InternalTopHitsTests extends InternalAggregationTestCase usedSortFields = new HashSet<>(); for (int i = 0; i < sortFields.length; i++) { - String sortField = randomValueOtherThanMany(usedSortFields::contains, () -> randomAsciiOfLength(5)); + String sortField = randomValueOtherThanMany(usedSortFields::contains, () -> randomAlphaOfLength(5)); usedSortFields.add(sortField); SortField.Type type = randomValueOtherThanMany(t -> t == SortField.Type.CUSTOM || t == SortField.Type.REWRITEABLE, () -> randomFrom(SortField.Type.values())); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java index d1f3a31677d..b6d133f1711 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java @@ -32,11 +32,11 @@ public class BucketScriptTests extends BasePipelineAggregationTestCase bucketsPaths = new HashMap<>(); int numBucketPaths = randomIntBetween(1, 10); for (int i = 0; i < numBucketPaths; i++) { - bucketsPaths.put(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 40)); + bucketsPaths.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 40)); } Script script; if (randomBoolean()) { @@ -52,7 +52,7 @@ public class BucketScriptTests extends BasePipelineAggregationTestCase bucketsPaths = new HashMap<>(); int numBucketPaths = randomIntBetween(1, 10); for (int i = 0; i < numBucketPaths; i++) { - bucketsPaths.put(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 40)); + bucketsPaths.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 40)); } Script script; if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java index 07cd9010ed0..3b1514a8c38 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java @@ -26,11 +26,11 @@ public class CumulativeSumTests extends BasePipelineAggregationTestCase{ + + @Override + protected InternalSimpleValue createTestInstance(String name, + List pipelineAggregators, Map metaData) { + DocValueFormat formatter = randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, + DocValueFormat.IP, DocValueFormat.RAW); + double value = randomDoubleBetween(0, 100000, true); + return new InternalSimpleValue(name, value, formatter, pipelineAggregators, metaData); + } + + @Override + public void testReduceRandom() { + expectThrows(UnsupportedOperationException.class, + () -> createTestInstance("name", Collections.emptyList(), null).reduce(null, + null)); + } + + @Override + protected void assertReduced(InternalSimpleValue reduced, List inputs) { + // no test since reduce operation is unsupported + } + + @Override + protected Reader instanceReader() { + return InternalSimpleValue::new; + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java index 01b31743828..7e71be69b64 100644 --- 
a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java @@ -27,11 +27,11 @@ public class SerialDifferenceTests extends BasePipelineAggregationTestCase { + + @Override + protected InternalDerivative createTestInstance(String name, + List pipelineAggregators, Map metaData) { + DocValueFormat formatter = randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, + DocValueFormat.IP, DocValueFormat.RAW); + double value = randomDoubleBetween(0, 100000, true); + double normalizationFactor = randomDoubleBetween(0, 100000, true); + return new InternalDerivative(name, value, normalizationFactor, formatter, + pipelineAggregators, metaData); + } + + @Override + public void testReduceRandom() { + expectThrows(UnsupportedOperationException.class, + () -> createTestInstance("name", Collections.emptyList(), null).reduce(null, + null)); + } + + @Override + protected void assertReduced(InternalDerivative reduced, List inputs) { + // no test since reduce operation is unsupported + } + + @Override + protected Reader instanceReader() { + return InternalDerivative::new; + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java index cb9d99e07cd..869a7cd58ed 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java @@ -35,11 +35,11 @@ public class MovAvgTests extends BasePipelineAggregationTestCase options = new HashMap<>(items); for (int i = 0; i < items; i++) { - options.put(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); + options.put(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); } highlightBuilder.options(options); break; @@ -685,7 +685,7 @@ public class HighlightBuilderTests extends ESTestCase { int size = randomIntBetween(minSize, maxSize); Set randomStrings = new HashSet<>(size); for (int f = 0; f < size; f++) { - randomStrings.add(randomAsciiOfLengthBetween(3, 10)); + randomStrings.add(randomAlphaOfLengthBetween(3, 10)); } return randomStrings.toArray(new String[randomStrings.size()]); } @@ -703,11 +703,11 @@ public class HighlightBuilderTests extends ESTestCase { case 0: mutation.useExplicitFieldOrder(!original.useExplicitFieldOrder()); break; case 1: - mutation.encoder(original.encoder() + randomAsciiOfLength(2)); break; + mutation.encoder(original.encoder() + randomAlphaOfLength(2)); break; case 2: if (randomBoolean()) { // add another field - mutation.field(new Field(randomAsciiOfLength(10))); + mutation.field(new Field(randomAlphaOfLength(10))); } else { // change existing fields List originalFields = original.fields(); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java index 027ae53a23d..b4bf2950b7d 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -38,14 +38,14 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashC public class HighlightFieldTests extends ESTestCase { public static 
HighlightField createTestItem() { - String name = frequently() ? randomAsciiOfLengthBetween(5, 20) : randomRealisticUnicodeOfCodepointLengthBetween(5, 20); + String name = frequently() ? randomAlphaOfLengthBetween(5, 20) : randomRealisticUnicodeOfCodepointLengthBetween(5, 20); Text[] fragments = null; if (frequently()) { int size = randomIntBetween(0, 5); fragments = new Text[size]; for (int i = 0; i < size; i++) { fragments[i] = new Text( - frequently() ? randomAsciiOfLengthBetween(10, 30) : randomRealisticUnicodeOfCodepointLengthBetween(10, 30)); + frequently() ? randomAlphaOfLengthBetween(10, 30) : randomRealisticUnicodeOfCodepointLengthBetween(10, 30)); } } return new HighlightField(name, fragments); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 11496309d47..c0c52928d20 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -2761,7 +2761,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { for (int i = 0; i < COUNT; i++) { //generating text with word to highlight in a different position //(https://github.com/elastic/elasticsearch/issues/4103) - String prefix = randomAsciiOfLengthBetween(5, 30); + String prefix = randomAlphaOfLengthBetween(5, 30); prefixes.put(String.valueOf(i), prefix); indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field1", "Sentence " + prefix + " test. Sentence two."); diff --git a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java index d0132cca7ad..b93b3795cb5 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java @@ -87,7 +87,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { private ShardSearchTransportRequest createShardSearchTransportRequest() throws IOException { SearchRequest searchRequest = createSearchRequest(); - ShardId shardId = new ShardId(randomAsciiOfLengthBetween(2, 10), randomAsciiOfLengthBetween(2, 10), randomInt()); + ShardId shardId = new ShardId(randomAlphaOfLengthBetween(2, 10), randomAlphaOfLengthBetween(2, 10), randomInt()); final AliasFilter filteringAliases; if (randomBoolean()) { String[] strings = generateRandomStringArray(10, 10, false, false); diff --git a/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java b/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java index fddbd6eaaef..70375c6d208 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java @@ -41,8 +41,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC public class ProfileResultTests extends ESTestCase { public static ProfileResult createTestItem(int depth) { - String type = randomAsciiOfLengthBetween(5, 10); - String description = randomAsciiOfLengthBetween(5, 10); + String type = randomAlphaOfLengthBetween(5, 10); + String description = randomAlphaOfLengthBetween(5, 10); int timingsSize = 
randomIntBetween(0, 5); Map timings = new HashMap<>(timingsSize); for (int i = 0; i < timingsSize; i++) { @@ -51,7 +51,7 @@ public class ProfileResultTests extends ESTestCase { // also often use "small" values in tests time = randomNonNegativeLong() % 10000; } - timings.put(randomAsciiOfLengthBetween(5, 10), time); // don't overflow Long.MAX_VALUE; + timings.put(randomAlphaOfLengthBetween(5, 10), time); // don't overflow Long.MAX_VALUE; } int childrenSize = depth > 0 ? randomIntBetween(0, 1) : 0; List children = new ArrayList<>(childrenSize); diff --git a/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java b/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java index 2d9cf847861..82ed118f559 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java @@ -51,7 +51,7 @@ public class SearchProfileShardResultsTests extends ESTestCase { queryProfileResults.add(QueryProfileShardResultTests.createTestItem()); } AggregationProfileShardResult aggProfileShardResult = AggregationProfileShardResultTests.createTestItem(1); - searchProfileResults.put(randomAsciiOfLengthBetween(5, 10), new ProfileShardResult(queryProfileResults, aggProfileShardResult)); + searchProfileResults.put(randomAlphaOfLengthBetween(5, 10), new ProfileShardResult(queryProfileResults, aggProfileShardResult)); } return new SearchProfileShardResults(searchProfileResults); } diff --git a/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index 342da16f508..b09c177bf0b 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -28,8 +28,6 @@ import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregator; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.ProfileShardResult; -import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; -import org.elasticsearch.search.profile.aggregation.AggregationTimingType; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; import java.util.List; @@ -68,7 +66,7 @@ public class AggregationProfilerIT extends ESIntegTestCase { String[] randomStrings = new String[randomIntBetween(2, 10)]; for (int i = 0; i < randomStrings.length; i++) { - randomStrings[i] = randomAsciiOfLength(10); + randomStrings[i] = randomAlphaOfLength(10); } for (int i = 0; i < 5; i++) { diff --git a/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java b/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java index 43e1e32db36..698e4a387aa 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java @@ -39,8 +39,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC public class CollectorResultTests extends ESTestCase { public static CollectorResult createTestItem(int depth) { - String name = randomAsciiOfLengthBetween(5, 10); - String reason = randomAsciiOfLengthBetween(5, 10); + 
String name = randomAlphaOfLengthBetween(5, 10); + String reason = randomAlphaOfLengthBetween(5, 10); long time = randomNonNegativeLong(); if (randomBoolean()) { // also often use relatively "small" values, otherwise we will mostly test huge longs diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 2002e7ce508..7ae22485df3 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -720,7 +720,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { private static List fillRandom(List list, int times) { for (int i = 0; i < times; i++) { - list.add(randomAsciiOfLength(5)); + list.add(randomAlphaOfLength(5)); } return list; } diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index 53277bd5b3c..ee68ea60036 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -135,7 +135,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { final long nowInMillis = randomNonNegativeLong(); Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis) { @@ -303,7 +303,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { */ public static QueryRescorerBuilder randomRescoreBuilder() { QueryBuilder queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()) - .queryName(randomAsciiOfLength(20)); + .queryName(randomAlphaOfLength(20)); org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index cb45208a39f..2179444aad7 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -58,7 +58,7 @@ public class SearchAfterBuilderTests extends ESTestCase { values[i] = randomDouble(); break; case 4: - values[i] = randomAsciiOfLengthBetween(5, 20); + values[i] = randomAlphaOfLengthBetween(5, 20); break; case 5: values[i] = randomBoolean(); @@ -70,7 +70,7 @@ public class SearchAfterBuilderTests extends ESTestCase { values[i] = randomShort(); break; case 8: - values[i] = new Text(randomAsciiOfLengthBetween(5, 20)); + values[i] = new Text(randomAlphaOfLengthBetween(5, 20)); break; case 9: values[i] = null; @@ -106,7 +106,7 @@ public class SearchAfterBuilderTests extends ESTestCase { jsonBuilder.value(randomDouble()); break; case 4: - 
jsonBuilder.value(randomAsciiOfLengthBetween(5, 20)); + jsonBuilder.value(randomAlphaOfLengthBetween(5, 20)); break; case 5: jsonBuilder.value(randomBoolean()); @@ -118,7 +118,7 @@ public class SearchAfterBuilderTests extends ESTestCase { jsonBuilder.value(randomShort()); break; case 8: - jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20))); + jsonBuilder.value(new Text(randomAlphaOfLengthBetween(5, 20))); break; case 9: jsonBuilder.nullValue(); diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 05a5c4acbe6..76faac73415 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -31,7 +31,6 @@ import org.elasticsearch.search.SearchContextException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.transport.RemoteTransportException; import org.hamcrest.Matchers; import java.util.List; @@ -201,7 +200,7 @@ public class SearchAfterIT extends ESIntegTestCase { values.add(randomDouble()); break; case 6: - values.add(randomAsciiOfLengthBetween(5, 20)); + values.add(randomAlphaOfLengthBetween(5, 20)); break; } } diff --git a/core/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java b/core/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java index aad6aa0982c..aa166f6e1a2 100644 --- a/core/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/core/src/test/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -83,7 +83,7 @@ public class SearchSliceIT extends ESIntegTestCase { for (int i = 0; i < NUM_DOCS; i++) { XContentBuilder builder = jsonBuilder(); builder.startObject(); - builder.field("invalid_random_kw", randomAsciiOfLengthBetween(5, 20)); + builder.field("invalid_random_kw", randomAlphaOfLengthBetween(5, 20)); builder.field("random_int", randomInt()); builder.field("static_int", 0); builder.field("invalid_random_int", randomInt()); diff --git a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index 9de5ee2a583..f7ba4286c2b 100644 --- a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -64,7 +64,7 @@ public class SliceBuilderTests extends ESTestCase { private static SliceBuilder randomSliceBuilder() throws IOException { int max = randomIntBetween(2, MAX_SLICE); int id = randomIntBetween(1, max - 1); - String field = randomAsciiOfLengthBetween(5, 20); + String field = randomAlphaOfLengthBetween(5, 20); return new SliceBuilder(field, id, max); } diff --git a/core/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java b/core/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java index e00dabc6363..3fa4ce41052 100644 --- a/core/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java +++ b/core/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.QueryUtils; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.StringHelper; import org.elasticsearch.common.UUIDs; import org.elasticsearch.test.ESTestCase; @@ -72,7 +73,8 @@ public class 
TermsSliceQueryTests extends ESTestCase { Document doc = new Document(); String uuid = UUIDs.base64UUID(); BytesRef br = new BytesRef(uuid); - int id = Math.floorMod(br.hashCode(), max); + int hashCode = StringHelper.murmurhash3_x86_32(br, TermsSliceQuery.SEED); + int id = Math.floorMod(hashCode, max); sliceCounters[id] ++; doc.add(new StringField("uuid", uuid, Field.Store.YES)); w.addDocument(doc); diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 17c60152aae..6f1087561c4 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -70,7 +70,6 @@ import org.junit.BeforeClass; import java.io.IOException; import java.nio.file.Path; import java.util.Collections; -import java.util.Map; import static java.util.Collections.emptyList; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; @@ -191,7 +190,7 @@ public abstract class AbstractSortTestCase> extends EST } protected QueryShardContext createMockShardContext() { - Index index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); + Index index = new Index(randomAlphaOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build()); IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null); @@ -245,7 +244,7 @@ public abstract class AbstractSortTestCase> extends EST case 0: return (new MatchAllQueryBuilder()).boost(randomFloat()); case 1: return (new IdsQueryBuilder()).boost(randomFloat()); case 2: return (new TermQueryBuilder( - randomAsciiOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10), randomDouble()).boost(randomFloat())); default: throw new IllegalStateException("Only three query builders supported for testing sort"); } diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 111d4256eb3..57e79ce98c3 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -39,12 +39,12 @@ public class FieldSortBuilderTests extends AbstractSortTestCase missingContent = Arrays.asList( "_last", "_first", - randomAsciiOfLength(10), randomUnicodeOfCodepointLengthBetween(5, 15), + Integer.toString(randomInt()), randomInt()); public FieldSortBuilder randomFieldSortBuilder() { - String fieldName = rarely() ? FieldSortBuilder.DOC_FIELD_NAME : randomAsciiOfLengthBetween(1, 10); + String fieldName = rarely() ? 
FieldSortBuilder.DOC_FIELD_NAME : randomAlphaOfLengthBetween(1, 10); FieldSortBuilder builder = new FieldSortBuilder(fieldName); if (randomBoolean()) { builder.order(randomFrom(SortOrder.values())); @@ -55,7 +55,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomAsciiOfLengthBetween(1, 10))); + () -> randomAlphaOfLengthBetween(1, 10))); break; case 1: mutated.setNestedFilter(randomValueOtherThan( @@ -94,7 +94,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase randomAsciiOfLengthBetween(1, 10))); + () -> randomAlphaOfLengthBetween(1, 10))); break; case 4: mutated.missing(randomValueOtherThan(original.missing(), () -> randomFrom(missingContent))); diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index cc0263c31f5..99ab42c6e42 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -50,7 +50,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomAsciiOfLengthBetween(1, 10))); + () -> randomAlphaOfLengthBetween(1, 10))); } if (randomBoolean()) { result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); @@ -166,7 +166,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase randomAsciiOfLengthBetween(1, 10))); + () -> randomAlphaOfLengthBetween(1, 10))); break; case 8: result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values()))); diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 1ad5de4071f..526be491f3d 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -43,7 +43,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase randomSuggestion) { - randomSuggestion.text(randomAsciiOfLengthBetween(2, 20)); // have to set the text because we don't know if the global text was set - maybeSet(randomSuggestion::prefix, randomAsciiOfLengthBetween(2, 20)); - maybeSet(randomSuggestion::regex, randomAsciiOfLengthBetween(2, 20)); - maybeSet(randomSuggestion::analyzer, randomAsciiOfLengthBetween(2, 20)); + randomSuggestion.text(randomAlphaOfLengthBetween(2, 20)); // have to set the text because we don't know if the global text was set + maybeSet(randomSuggestion::prefix, randomAlphaOfLengthBetween(2, 20)); + maybeSet(randomSuggestion::regex, randomAlphaOfLengthBetween(2, 20)); + maybeSet(randomSuggestion::analyzer, randomAlphaOfLengthBetween(2, 20)); maybeSet(randomSuggestion::size, randomIntBetween(1, 20)); maybeSet(randomSuggestion::shardSize, randomIntBetween(1, 20)); } @@ -146,16 +146,16 @@ public abstract class AbstractSuggestionBuilderTestCase randomAsciiOfLengthBetween(2, 20))); + mutation.text(randomValueOtherThan(mutation.text(), () -> randomAlphaOfLengthBetween(2, 20))); break; case 1: - mutation.prefix(randomValueOtherThan(mutation.prefix(), () -> randomAsciiOfLengthBetween(2, 20))); + mutation.prefix(randomValueOtherThan(mutation.prefix(), () -> randomAlphaOfLengthBetween(2, 20))); break; case 2: - mutation.regex(randomValueOtherThan(mutation.regex(), () -> randomAsciiOfLengthBetween(2, 20))); + 
mutation.regex(randomValueOtherThan(mutation.regex(), () -> randomAlphaOfLengthBetween(2, 20))); break; case 3: - mutation.analyzer(randomValueOtherThan(mutation.analyzer(), () -> randomAsciiOfLengthBetween(2, 20))); + mutation.analyzer(randomValueOtherThan(mutation.analyzer(), () -> randomAlphaOfLengthBetween(2, 20))); break; case 4: mutation.size(randomValueOtherThan(mutation.size(), () -> randomIntBetween(1, 20))); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java index 4e93201a414..9aa4d9dc2b4 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java @@ -42,7 +42,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC public class CompletionSuggestionOptionTests extends ESTestCase { public static Option createTestItem() { - Text text = new Text(randomAsciiOfLengthBetween(5, 15)); + Text text = new Text(randomAlphaOfLengthBetween(5, 15)); int docId = randomInt(); int numberOfContexts = randomIntBetween(0, 3); Map> contexts = new HashMap<>(); @@ -50,9 +50,9 @@ public class CompletionSuggestionOptionTests extends ESTestCase { int numberOfValues = randomIntBetween(0, 3); Set values = new HashSet<>(); for (int v = 0; v < numberOfValues; v++) { - values.add(randomAsciiOfLengthBetween(5, 15)); + values.add(randomAlphaOfLengthBetween(5, 15)); } - contexts.put(randomAsciiOfLengthBetween(5, 15), values); + contexts.put(randomAlphaOfLengthBetween(5, 15), values); } SearchHit hit = null; float score = randomFloat(); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index fa94eabeb53..a3ea5e99324 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -631,7 +631,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { refresh(); - String suggestionName = randomAsciiOfLength(10); + String suggestionName = randomAlphaOfLength(10); CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10) .contexts(Collections.singletonMap("st", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build()))); SearchResponse searchResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, context)).get(); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index dde5a730457..d5eee4d3dee 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -82,9 +82,9 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { .endObject()) .setRefreshPolicy(IMMEDIATE).get(); - String randomText = randomAsciiOfLength(10); - String randomField = randomAsciiOfLength(10); - String randomSuffix = randomAsciiOfLength(10); + String randomText = randomAlphaOfLength(10); + String randomField = randomAlphaOfLength(10); + String randomSuffix = 
randomAlphaOfLength(10); SuggestBuilder suggestBuilder = new SuggestBuilder(); suggestBuilder.addSuggestion("someName", new CustomSuggestionBuilder(randomField, randomSuffix).text(randomText)); SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setTypes("test").setFrom(0).setSize(1) diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 9029a097caa..61094be8a35 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -129,9 +129,9 @@ public class SuggestBuilderTests extends ESTestCase { mutation.addSuggestion(suggestionBuilder.getKey(), suggestionBuilder.getValue()); } if (randomBoolean()) { - mutation.setGlobalText(randomAsciiOfLengthBetween(5, 60)); + mutation.setGlobalText(randomAlphaOfLengthBetween(5, 60)); } else { - mutation.addSuggestion(randomAsciiOfLength(10), PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); + mutation.addSuggestion(randomAlphaOfLength(10), PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder()); } return mutation; } @@ -139,11 +139,11 @@ public class SuggestBuilderTests extends ESTestCase { public static SuggestBuilder randomSuggestBuilder() { SuggestBuilder builder = new SuggestBuilder(); if (randomBoolean()) { - builder.setGlobalText(randomAsciiOfLengthBetween(1, 20)); + builder.setGlobalText(randomAlphaOfLengthBetween(1, 20)); } final int numSuggestions = randomIntBetween(1, 5); for (int i = 0; i < numSuggestions; i++) { - builder.addSuggestion(randomAsciiOfLengthBetween(5, 10), randomSuggestionBuilder()); + builder.addSuggestion(randomAlphaOfLengthBetween(5, 10), randomSuggestionBuilder()); } return builder; } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java index 24e98899e87..8a6713caf5f 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java @@ -125,9 +125,9 @@ public class SuggestTests extends ESTestCase { public void testFilter() throws Exception { List>> suggestions; - CompletionSuggestion completionSuggestion = new CompletionSuggestion(randomAsciiOfLength(10), 2); - PhraseSuggestion phraseSuggestion = new PhraseSuggestion(randomAsciiOfLength(10), 2); - TermSuggestion termSuggestion = new TermSuggestion(randomAsciiOfLength(10), 2, SortBy.SCORE); + CompletionSuggestion completionSuggestion = new CompletionSuggestion(randomAlphaOfLength(10), 2); + PhraseSuggestion phraseSuggestion = new PhraseSuggestion(randomAlphaOfLength(10), 2); + TermSuggestion termSuggestion = new TermSuggestion(randomAlphaOfLength(10), 2, SortBy.SCORE); suggestions = Arrays.asList(completionSuggestion, phraseSuggestion, termSuggestion); Suggest suggest = new Suggest(suggestions); List phraseSuggestions = suggest.filter(PhraseSuggestion.class); @@ -146,7 +146,7 @@ public class SuggestTests extends ESTestCase { suggestions = new ArrayList<>(); int n = randomIntBetween(2, 5); for (int i = 0; i < n; i++) { - suggestions.add(new CompletionSuggestion(randomAsciiOfLength(10), randomIntBetween(3, 5))); + suggestions.add(new CompletionSuggestion(randomAlphaOfLength(10), randomIntBetween(3, 5))); } Collections.shuffle(suggestions, random()); Suggest suggest = new Suggest(suggestions); diff --git 
a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java index 07f310dfd0f..ddb49048274 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java @@ -55,7 +55,7 @@ public class SuggestionEntryTests extends ESTestCase { */ @SuppressWarnings("unchecked") public static Entry createTestItem(Class entryType) { - Text entryText = new Text(randomAsciiOfLengthBetween(5, 15)); + Text entryText = new Text(randomAlphaOfLengthBetween(5, 15)); int offset = randomInt(); int length = randomInt(); Entry entry; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java index 9ffad11b078..268f7429d71 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java @@ -35,9 +35,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC public class SuggestionOptionTests extends ESTestCase { public static Option createTestItem() { - Text text = new Text(randomAsciiOfLengthBetween(5, 15)); + Text text = new Text(randomAlphaOfLengthBetween(5, 15)); float score = randomFloat(); - Text highlighted = randomFrom((Text) null, new Text(randomAsciiOfLengthBetween(5, 15))); + Text highlighted = randomFrom((Text) null, new Text(randomAlphaOfLengthBetween(5, 15))); Boolean collateMatch = randomFrom((Boolean) null, randomBoolean()); return new Option(text, highlighted, score, collateMatch); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java index 259ff50e0b7..9f1607d9d65 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java @@ -65,7 +65,7 @@ public class SuggestionTests extends ESTestCase { @SuppressWarnings({ "unchecked", "rawtypes" }) public static Suggestion> createTestItem(Class type) { - String name = randomAsciiOfLengthBetween(5, 10); + String name = randomAlphaOfLengthBetween(5, 10); // note: size will not be rendered via "toXContent", only passed on internally on transport layer int size = randomInt(); Supplier entrySupplier = null; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java b/core/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java index ad2aed0226f..668629fe57e 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java @@ -35,7 +35,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC public class TermSuggestionOptionTests extends ESTestCase { public static Option createTestItem() { - Text text = new Text(randomAsciiOfLengthBetween(5, 15)); + Text text = new Text(randomAlphaOfLengthBetween(5, 15)); float score = randomFloat(); int freq = randomInt(); return new Option(text, freq, score); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java index 
59d7bbd61b9..658f145b1c6 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CategoryQueryContextTests.java @@ -28,7 +28,7 @@ public class CategoryQueryContextTests extends QueryContextTestCase> contextMap = new HashMap<>(); @@ -77,7 +77,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe for (int i = 0; i < numContext; i++) { contexts.add(CategoryQueryContextTests.randomCategoryQueryContext()); } - String name = randomAsciiOfLength(10); + String name = randomAlphaOfLength(10); contextMap.put(name, contexts); builderAndInfo.catContexts.add(name); } @@ -87,7 +87,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe for (int i = 0; i < numContext; i++) { contexts.add(GeoQueryContextTests.randomGeoQueryContext()); } - String name = randomAsciiOfLength(10); + String name = randomAlphaOfLength(10); contextMap.put(name, contexts); builderAndInfo.geoContexts.add(name); } @@ -114,7 +114,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe for (int i = 0; i < nCatContext; i++) { contexts.add(CategoryQueryContextTests.randomCategoryQueryContext()); } - builder.contexts(Collections.singletonMap(randomAsciiOfLength(10), contexts)); + builder.contexts(Collections.singletonMap(randomAlphaOfLength(10), contexts)); break; case 1: int nGeoContext = randomIntBetween(1, 5); @@ -122,16 +122,16 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe for (int i = 0; i < nGeoContext; i++) { geoContexts.add(GeoQueryContextTests.randomGeoQueryContext()); } - builder.contexts(Collections.singletonMap(randomAsciiOfLength(10), geoContexts)); + builder.contexts(Collections.singletonMap(randomAlphaOfLength(10), geoContexts)); break; case 2: - builder.prefix(randomAsciiOfLength(10), FuzzyOptionsTests.randomFuzzyOptions()); + builder.prefix(randomAlphaOfLength(10), FuzzyOptionsTests.randomFuzzyOptions()); break; case 3: - builder.prefix(randomAsciiOfLength(10), randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO)); + builder.prefix(randomAlphaOfLength(10), randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO)); break; case 4: - builder.regex(randomAsciiOfLength(10), RegexOptionsTests.randomRegexOptions()); + builder.regex(randomAlphaOfLength(10), RegexOptionsTests.randomRegexOptions()); break; default: throw new IllegalStateException("should not through"); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java index 60722fb5f6d..4b0e60a1d00 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionTests.java @@ -35,7 +35,7 @@ public class CompletionSuggestionTests extends ESTestCase { public void testToReduce() throws Exception { List> shardSuggestions = new ArrayList<>(); int nShards = randomIntBetween(1, 10); - String name = randomAsciiOfLength(10); + String name = randomAlphaOfLength(10); int size = randomIntBetween(3, 5); for (int i = 0; i < nShards; i++) { CompletionSuggestion suggestion = new CompletionSuggestion(name, size); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java 
b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 6c30ccb4570..10022cc289a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -177,7 +177,7 @@ public class DirectCandidateGeneratorTests extends ESTestCase { * create random {@link DirectCandidateGeneratorBuilder} */ public static DirectCandidateGeneratorBuilder randomCandidateGenerator() { - DirectCandidateGeneratorBuilder generator = new DirectCandidateGeneratorBuilder(randomAsciiOfLength(10)); + DirectCandidateGeneratorBuilder generator = new DirectCandidateGeneratorBuilder(randomAlphaOfLength(10)); maybeSet(generator::accuracy, randomFloat()); maybeSet(generator::maxEdits, randomIntBetween(1, 2)); maybeSet(generator::maxInspections, randomIntBetween(1, 20)); @@ -185,8 +185,8 @@ public class DirectCandidateGeneratorTests extends ESTestCase { maybeSet(generator::minDocFreq, randomFloat()); maybeSet(generator::minWordLength, randomIntBetween(1, 20)); maybeSet(generator::prefixLength, randomIntBetween(1, 20)); - maybeSet(generator::preFilter, randomAsciiOfLengthBetween(1, 20)); - maybeSet(generator::postFilter, randomAsciiOfLengthBetween(1, 20)); + maybeSet(generator::preFilter, randomAlphaOfLengthBetween(1, 20)); + maybeSet(generator::postFilter, randomAlphaOfLengthBetween(1, 20)); maybeSet(generator::size, randomIntBetween(1, 20)); maybeSet(generator::sort, randomFrom("score", "frequency")); maybeSet(generator::stringDistance, randomFrom("internal", "damerau_levenshtein", "levenstein", "jarowinkler", "ngram")); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index eab8440fa8c..8b10a17064e 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -33,13 +33,13 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC } public static PhraseSuggestionBuilder randomPhraseSuggestionBuilder() { - PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAlphaOfLengthBetween(2, 20)); setCommonPropertiesOnRandomBuilder(testBuilder); maybeSet(testBuilder::maxErrors, randomFloat()); - maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10)); + maybeSet(testBuilder::separator, randomAlphaOfLengthBetween(1, 10)); maybeSet(testBuilder::realWordErrorLikelihood, randomFloat()); maybeSet(testBuilder::confidence, randomFloat()); - maybeSet(testBuilder::collateQuery, randomAsciiOfLengthBetween(3, 20)); + maybeSet(testBuilder::collateQuery, randomAlphaOfLengthBetween(3, 20)); // collate query prune and parameters will only be used when query is set if (testBuilder.collateQuery() != null) { maybeSet(testBuilder::collatePrune, randomBoolean()); @@ -47,14 +47,14 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC Map collateParams = new HashMap<>(); int numParams = randomIntBetween(1, 5); for (int i = 0; i < numParams; i++) { - collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + collateParams.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); } 
testBuilder.collateParams(collateParams ); } } if (randomBoolean()) { // preTag, postTag - testBuilder.highlight(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20)); + testBuilder.highlight(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20)); } maybeSet(testBuilder::gramSize, randomIntBetween(1, 5)); maybeSet(testBuilder::forceUnigrams, randomBoolean()); @@ -106,14 +106,14 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC builder.tokenLimit(randomValueOtherThan(builder.tokenLimit(), () -> randomIntBetween(1, 20))); break; case 5: - builder.separator(randomValueOtherThan(builder.separator(), () -> randomAsciiOfLengthBetween(1, 10))); + builder.separator(randomValueOtherThan(builder.separator(), () -> randomAlphaOfLengthBetween(1, 10))); break; case 6: Script collateQuery = builder.collateQuery(); if (collateQuery != null) { - builder.collateQuery(randomValueOtherThan(collateQuery.getIdOrCode(), () -> randomAsciiOfLengthBetween(3, 20))); + builder.collateQuery(randomValueOtherThan(collateQuery.getIdOrCode(), () -> randomAlphaOfLengthBetween(3, 20))); } else { - builder.collateQuery(randomAsciiOfLengthBetween(3, 20)); + builder.collateQuery(randomAlphaOfLengthBetween(3, 20)); } break; case 7: @@ -126,7 +126,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC // simply double both values builder.highlight(builder.preTag() + builder.preTag(), builder.postTag() + builder.postTag()); } else { - builder.highlight(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20)); + builder.highlight(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20)); } break; case 9: @@ -134,7 +134,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC break; case 10: Map collateParams = builder.collateParams() == null ? 
new HashMap<>(1) : builder.collateParams(); - collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + collateParams.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); builder.collateParams(collateParams); break; case 11: @@ -155,7 +155,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC e = expectThrows(IllegalArgumentException.class, () -> new PhraseSuggestionBuilder("")); assertEquals("suggestion field name is empty", e.getMessage()); - PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(randomAlphaOfLengthBetween(2, 20)); e = expectThrows(IllegalArgumentException.class, () -> builder.gramSize(0)); assertEquals("gramSize must be >= 1", e.getMessage()); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java index f27d02fbe22..c5471f3ee0a 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilderTests.java @@ -58,7 +58,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas * Creates a random TermSuggestionBuilder */ public static TermSuggestionBuilder randomTermSuggestionBuilder() { - TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAlphaOfLengthBetween(2, 20)); setCommonPropertiesOnRandomBuilder(testBuilder); maybeSet(testBuilder::suggestMode, randomSuggestMode()); maybeSet(testBuilder::accuracy, randomFloat()); @@ -151,7 +151,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas e = expectThrows(IllegalArgumentException.class, () -> new TermSuggestionBuilder("")); assertEquals("suggestion field name is empty", e.getMessage()); - TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAlphaOfLengthBetween(2, 20)); // test invalid accuracy values expectThrows(IllegalArgumentException.class, () -> builder.accuracy(-0.5f)); @@ -193,7 +193,7 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas } public void testDefaultValuesSet() { - TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); + TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAlphaOfLengthBetween(2, 20)); assertEquals(DEFAULT_ACCURACY, builder.accuracy(), Float.MIN_VALUE); assertEquals(DEFAULT_MAX_EDITS, builder.maxEdits()); assertEquals(DEFAULT_MAX_INSPECTIONS, builder.maxInspections()); diff --git a/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java b/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java index 56ff111e4b0..9d0e353cd44 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java @@ -151,7 +151,7 @@ public class BlobStoreFormatIT extends AbstractSnapshotIntegTestCase { public void testBlobCorruption() throws IOException { BlobStore blobStore = createTestBlobStore(); BlobContainer blobContainer = blobStore.blobContainer(BlobPath.cleanPath()); - String testString = randomAsciiOfLength(randomInt(10000)); + 
String testString = randomAlphaOfLength(randomInt(10000)); BlobObj blobObj = new BlobObj(testString); ChecksumBlobStoreFormat checksumFormat = new ChecksumBlobStoreFormat<>(BLOB_CODEC, "%s", BlobObj::fromXContent, xContentRegistry(), randomBoolean(), randomBoolean() ? XContentType.SMILE : XContentType.JSON); @@ -171,7 +171,7 @@ public class BlobStoreFormatIT extends AbstractSnapshotIntegTestCase { public void testAtomicWrite() throws Exception { final BlobStore blobStore = createTestBlobStore(); final BlobContainer blobContainer = blobStore.blobContainer(BlobPath.cleanPath()); - String testString = randomAsciiOfLength(randomInt(10000)); + String testString = randomAlphaOfLength(randomInt(10000)); final CountDownLatch block = new CountDownLatch(1); final CountDownLatch unblock = new CountDownLatch(1); final BlobObj blobObj = new BlobObj(testString) { diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 8f6701977e4..08a3308172b 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; - import org.elasticsearch.action.ListenableActionFuture; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -55,7 +54,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.discovery.zen.ElectMasterService; -import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; @@ -150,14 +148,9 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest public void testRestorePersistentSettings() throws Exception { logger.info("--> start 2 nodes"); - Settings nodeSettings = Settings.builder() - .put("discovery.type", "zen") - .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") - .put("discovery.initial_state_timeout", "500ms") - .build(); - internalCluster().startNode(nodeSettings); + internalCluster().startNode(); Client client = client(); - String secondNode = internalCluster().startNode(nodeSettings); + String secondNode = internalCluster().startNode(); logger.info("--> wait for the second node to join the cluster"); assertThat(client.admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut(), equalTo(false)); @@ -354,7 +347,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest .setType("mock").setSettings( Settings.builder() .put("location", randomRepoPath()) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200) ).get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); @@ -402,7 +395,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest .setType("mock").setSettings( Settings.builder() .put("location", repo) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200) ).get(); 
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/snapshots/MinThreadsSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/MinThreadsSnapshotRestoreIT.java index d1759d83e34..680bfea6b3b 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/MinThreadsSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/MinThreadsSnapshotRestoreIT.java @@ -59,7 +59,7 @@ public class MinThreadsSnapshotRestoreIT extends AbstractSnapshotIntegTestCase { assertAcked(client().admin().cluster().preparePutRepository(repo).setType("mock").setSettings( Settings.builder() .put("location", randomRepoPath()) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200)).get()); logger.info("--> snapshot twice"); @@ -113,7 +113,7 @@ public class MinThreadsSnapshotRestoreIT extends AbstractSnapshotIntegTestCase { assertAcked(client().admin().cluster().preparePutRepository(repo).setType("mock").setSettings( Settings.builder() .put("location", randomRepoPath()) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200)).get()); logger.info("--> snapshot"); @@ -160,7 +160,7 @@ public class MinThreadsSnapshotRestoreIT extends AbstractSnapshotIntegTestCase { assertAcked(client().admin().cluster().preparePutRepository(repo).setType("mock").setSettings( Settings.builder() .put("location", randomRepoPath()) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200)).get()); logger.info("--> snapshot"); diff --git a/core/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java b/core/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java index 101ae591292..8f027b66950 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/snapshots/RepositoriesMetaDataSerializationTests.java @@ -42,7 +42,7 @@ public class RepositoriesMetaDataSerializationTests extends AbstractDiffableSeri int numberOfRepositories = randomInt(10); List entries = new ArrayList<>(); for (int i = 0; i < numberOfRepositories; i++) { - entries.add(new RepositoryMetaData(randomAsciiOfLength(10), randomAsciiOfLength(10), randomSettings())); + entries.add(new RepositoryMetaData(randomAlphaOfLength(10), randomAlphaOfLength(10), randomSettings())); } entries.sort(Comparator.comparing(RepositoryMetaData::name)); return new RepositoriesMetaData(entries.toArray(new RepositoryMetaData[entries.size()])); @@ -60,7 +60,7 @@ public class RepositoriesMetaDataSerializationTests extends AbstractDiffableSeri int numberOfSettings = randomInt(10); Settings.Builder builder = Settings.builder(); for (int i = 0; i < numberOfSettings; i++) { - builder.put(randomAsciiOfLength(10), randomAsciiOfLength(20)); + builder.put(randomAlphaOfLength(10), randomAlphaOfLength(20)); } return builder.build(); } @@ -79,7 +79,7 @@ public class RepositoriesMetaDataSerializationTests extends AbstractDiffableSeri // add some elements int addElements = randomInt(10); for (int i = 0; i < addElements; i++) { - repos.add(new RepositoryMetaData(randomAsciiOfLength(10), randomAsciiOfLength(10), randomSettings())); + repos.add(new RepositoryMetaData(randomAlphaOfLength(10), randomAlphaOfLength(10), randomSettings())); } } return new RepositoriesMetaData(repos.toArray(new 
RepositoryMetaData[repos.size()])); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 77a0514a140..45bd369a941 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -657,7 +657,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", randomRepoPath()) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("random_control_io_exception_rate", 0.2)) .setVerify(false)); @@ -707,7 +707,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", randomRepoPath()) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("random_data_file_io_exception_rate", 0.3))); createIndex("test-idx"); @@ -792,7 +792,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", repositoryLocation) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("random_data_file_io_exception_rate", 0.3))); // Test restore after index deletion @@ -833,7 +833,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", repositoryLocation) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("use_lucene_corruption", true) .put("max_failure_number", 10000000L) .put("random_data_file_io_exception_rate", 1.0))); @@ -879,7 +879,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", repositoryLocation) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("random_data_file_io_exception_rate", 1.0) // Fail completely )); @@ -1312,7 +1312,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", repositoryLocation) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200))); // Create index on 2 nodes and make sure each node has a primary by setting no replicas @@ -1373,7 +1373,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", repositoryLocation) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200) ).get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); @@ -1569,7 +1569,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .setType("mock").setSettings( Settings.builder() .put("location", repositoryLocation) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200) ).get(); assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); @@ -2533,7 +2533,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> take {} snapshot(s)", numSnapshots - 1); final String[] 
snapshotNames = new String[numSnapshots]; for (int i = 0; i < numSnapshots - 1; i++) { - final String snapshotName = randomAsciiOfLength(8).toLowerCase(Locale.ROOT); + final String snapshotName = randomAlphaOfLength(8).toLowerCase(Locale.ROOT); CreateSnapshotResponse createSnapshotResponse = client.admin() .cluster() .prepareCreateSnapshot(repositoryName, snapshotName) @@ -2550,7 +2550,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } refresh(); - final String inProgressSnapshot = randomAsciiOfLength(8).toLowerCase(Locale.ROOT); + final String inProgressSnapshot = randomAlphaOfLength(8).toLowerCase(Locale.ROOT); snapshotNames[numSnapshots - 1] = inProgressSnapshot; // block a node so the create snapshot operation can remain in progress final String blockedNode = blockNodeWithIndex(repositoryName, indexName); @@ -2644,7 +2644,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas PutRepositoryResponse putRepositoryResponse = client().admin().cluster().preparePutRepository(repo).setType("mock").setSettings(Settings.builder() .put("location", randomRepoPath()) - .put("random", randomAsciiOfLength(10)) + .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200) ).get(); assertTrue(putRepositoryResponse.isAcknowledged()); @@ -2688,7 +2688,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("random_control_io_exception_rate", randomIntBetween(5, 20) / 100f) // test that we can take a snapshot after a failed one, even if a partial index-N was written .put("atomic_move", false) - .put("random", randomAsciiOfLength(10)))); + .put("random", randomAlphaOfLength(10)))); logger.info("--> indexing some data"); assertAcked(prepareCreate("test-idx").setSettings( diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java index aceefea97f5..3d7277b9c68 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java @@ -46,8 +46,8 @@ public class SnapshotTests extends ESTestCase { } public void testSerialization() throws IOException { - final SnapshotId snapshotId = new SnapshotId(randomAsciiOfLength(randomIntBetween(2, 8)), UUIDs.randomBase64UUID()); - final Snapshot original = new Snapshot(randomAsciiOfLength(randomIntBetween(2, 8)), snapshotId); + final SnapshotId snapshotId = new SnapshotId(randomAlphaOfLength(randomIntBetween(2, 8)), UUIDs.randomBase64UUID()); + final Snapshot original = new Snapshot(randomAlphaOfLength(randomIntBetween(2, 8)), snapshotId); final BytesStreamOutput out = new BytesStreamOutput(); original.writeTo(out); assertThat(new Snapshot(out.bytes().streamInput()), equalTo(original)); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java index 4b5a9677945..edf3ada587f 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotsInProgressSerializationTests.java @@ -49,22 +49,22 @@ public class SnapshotsInProgressSerializationTests extends AbstractDiffableWireS } private Entry randomSnapshot() { - Snapshot snapshot = new Snapshot(randomAsciiOfLength(10), new SnapshotId(randomAsciiOfLength(10), randomAsciiOfLength(10))); + Snapshot snapshot = new 
Snapshot(randomAlphaOfLength(10), new SnapshotId(randomAlphaOfLength(10), randomAlphaOfLength(10))); boolean includeGlobalState = randomBoolean(); boolean partial = randomBoolean(); State state = randomFrom(State.values()); int numberOfIndices = randomIntBetween(0, 10); List indices = new ArrayList<>(); for (int i = 0; i < numberOfIndices; i++) { - indices.add(new IndexId(randomAsciiOfLength(10), randomAsciiOfLength(10))); + indices.add(new IndexId(randomAlphaOfLength(10), randomAlphaOfLength(10))); } long startTime = randomLong(); long repositoryStateId = randomLong(); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(); int shardsCount = randomIntBetween(0, 10); for (int j = 0; j < shardsCount; j++) { - ShardId shardId = new ShardId(new Index(randomAsciiOfLength(10), randomAsciiOfLength(10)), randomIntBetween(0, 10)); - String nodeId = randomAsciiOfLength(10); + ShardId shardId = new ShardId(new Index(randomAlphaOfLength(10), randomAlphaOfLength(10)), randomIntBetween(0, 10)); + String nodeId = randomAlphaOfLength(10); State shardState = randomFrom(State.values()); builder.put(shardId, new SnapshotsInProgress.ShardSnapshotStatus(nodeId, shardState)); } diff --git a/core/src/test/java/org/elasticsearch/tasks/TaskResultTests.java b/core/src/test/java/org/elasticsearch/tasks/TaskResultTests.java index ff8fcd8fbaf..c4cf7713c3d 100644 --- a/core/src/test/java/org/elasticsearch/tasks/TaskResultTests.java +++ b/core/src/test/java/org/elasticsearch/tasks/TaskResultTests.java @@ -89,10 +89,10 @@ public class TaskResultTests extends ESTestCase { private static TaskInfo randomTaskInfo() throws IOException { TaskId taskId = randomTaskId(); - String type = randomAsciiOfLength(5); - String action = randomAsciiOfLength(5); + String type = randomAlphaOfLength(5); + String action = randomAlphaOfLength(5); Task.Status status = randomBoolean() ? randomRawTaskStatus() : null; - String description = randomBoolean() ? randomAsciiOfLength(5) : null; + String description = randomBoolean() ? 
randomAlphaOfLength(5) : null; long startTime = randomLong(); long runningTimeNanos = randomLong(); boolean cancellable = randomBoolean(); @@ -101,7 +101,7 @@ public class TaskResultTests extends ESTestCase { } private static TaskId randomTaskId() { - return new TaskId(randomAsciiOfLength(5), randomLong()); + return new TaskId(randomAlphaOfLength(5), randomLong()); } private static RawTaskStatus randomRawTaskStatus() throws IOException { @@ -109,7 +109,7 @@ public class TaskResultTests extends ESTestCase { builder.startObject(); int fields = between(0, 10); for (int f = 0; f < fields; f++) { - builder.field(randomAsciiOfLength(5), randomAsciiOfLength(5)); + builder.field(randomAlphaOfLength(5), randomAlphaOfLength(5)); } builder.endObject(); return new RawTaskStatus(builder.bytes()); @@ -120,7 +120,7 @@ public class TaskResultTests extends ESTestCase { Map result = new TreeMap<>(); int fields = between(0, 10); for (int f = 0; f < fields; f++) { - result.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + result.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); } return new ToXContent() { @Override diff --git a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java index c84fa38edc0..c14d6ec9e05 100644 --- a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; @@ -38,6 +39,8 @@ import java.net.InetSocketAddress; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import static org.hamcrest.Matchers.equalTo; @@ -152,6 +155,7 @@ public class TCPTransportTests extends ESTestCase { final AtomicBoolean called = new AtomicBoolean(false); Req request = new Req(randomRealisticUnicodeOfLengthBetween(10, 100)); ThreadPool threadPool = new TestThreadPool(TCPTransportTests.class.getName()); + AtomicReference exceptionReference = new AtomicReference<>(); try { TcpTransport transport = new TcpTransport("test", Settings.builder().put("transport.tcp.compress", compressed).build(), threadPool, new BigArrays(Settings.EMPTY, null), null, null, null) { @@ -171,27 +175,31 @@ public class TCPTransportTests extends ESTestCase { } @Override - protected void sendMessage(Object o, BytesReference reference, Runnable sendListener) throws IOException { - StreamInput streamIn = reference.streamInput(); - streamIn.skip(TcpHeader.MARKER_BYTES_SIZE); - int len = streamIn.readInt(); - long requestId = streamIn.readLong(); - assertEquals(42, requestId); - byte status = streamIn.readByte(); - Version version = Version.fromId(streamIn.readInt()); - assertEquals(Version.CURRENT, version); - assertEquals(compressed, TransportStatus.isCompress(status)); - called.compareAndSet(false, true); - if (compressed) { - final int bytesConsumed = TcpHeader.HEADER_SIZE; - streamIn = CompressorFactory.compressor(reference.slice(bytesConsumed, reference.length() - bytesConsumed)) - .streamInput(streamIn); + protected void sendMessage(Object o, BytesReference reference, ActionListener listener) { + try { + 
StreamInput streamIn = reference.streamInput(); + streamIn.skip(TcpHeader.MARKER_BYTES_SIZE); + int len = streamIn.readInt(); + long requestId = streamIn.readLong(); + assertEquals(42, requestId); + byte status = streamIn.readByte(); + Version version = Version.fromId(streamIn.readInt()); + assertEquals(Version.CURRENT, version); + assertEquals(compressed, TransportStatus.isCompress(status)); + called.compareAndSet(false, true); + if (compressed) { + final int bytesConsumed = TcpHeader.HEADER_SIZE; + streamIn = CompressorFactory.compressor(reference.slice(bytesConsumed, reference.length() - bytesConsumed)) + .streamInput(streamIn); + } + threadPool.getThreadContext().readHeaders(streamIn); + assertEquals("foobar", streamIn.readString()); + Req readReq = new Req(""); + readReq.readFrom(streamIn); + assertEquals(request.value, readReq.value); + } catch (IOException e) { + exceptionReference.set(e); } - threadPool.getThreadContext().readHeaders(streamIn); - assertEquals("foobar", streamIn.readString()); - Req readReq = new Req(""); - readReq.readFrom(streamIn); - assertEquals(request.value, readReq.value); } @Override @@ -219,6 +227,7 @@ public class TCPTransportTests extends ESTestCase { Transport.Connection connection = transport.getConnection(node); connection.sendRequest(42, "foobar", request, TransportRequestOptions.EMPTY); assertTrue(called.get()); + assertNull("IOException while sending message.", exceptionReference.get()); } finally { ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); } diff --git a/core/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/core/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index e1cfc08dbd0..bb1c70da344 100644 --- a/core/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -246,4 +246,16 @@ public class TransportActionProxyTests extends ESTestCase { } } + public void testGetAction() { + String action = "foo/bar"; + String proxyAction = TransportActionProxy.getProxyAction(action); + assertTrue(proxyAction.endsWith(action)); + assertEquals("internal:transport/proxy/foo/bar", proxyAction); + } + + public void testUnwrap() { + TransportRequest transportRequest = TransportActionProxy.wrapRequest(nodeA, TransportService.HandshakeRequest.INSTANCE); + assertTrue(transportRequest instanceof TransportActionProxy.ProxyRequest); + assertSame(TransportService.HandshakeRequest.INSTANCE, TransportActionProxy.unwrapRequest(transportRequest)); + } } diff --git a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index 3b6165adab1..ab882b40316 100644 --- a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -166,7 +166,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { NetworkHandle handleA = startServices("TS_A", settings, Version.CURRENT); NetworkHandle handleB = startServices("TS_B", settings, Version.CURRENT); DiscoveryNode discoveryNode = new DiscoveryNode( - randomAsciiOfLength(10), + randomAlphaOfLength(10), handleB.discoveryNode.getAddress(), emptyMap(), emptySet(), diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index a63d16a9364..9e1d37681be 100644 --- 
a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -504,8 +504,8 @@ public class TribeIT extends ESIntegTestCase { public void testMergingCustomMetaData() throws Exception { removeCustomMetaData(cluster1, MergableCustomMetaData1.TYPE); removeCustomMetaData(cluster2, MergableCustomMetaData1.TYPE); - MergableCustomMetaData1 customMetaData1 = new MergableCustomMetaData1(randomAsciiOfLength(10)); - MergableCustomMetaData1 customMetaData2 = new MergableCustomMetaData1(randomAsciiOfLength(10)); + MergableCustomMetaData1 customMetaData1 = new MergableCustomMetaData1(randomAlphaOfLength(10)); + MergableCustomMetaData1 customMetaData2 = new MergableCustomMetaData1(randomAlphaOfLength(10)); List customMetaDatas = Arrays.asList(customMetaData1, customMetaData2); Collections.sort(customMetaDatas, (cm1, cm2) -> cm2.getData().compareTo(cm1.getData())); final MergableCustomMetaData1 tribeNodeCustomMetaData = customMetaDatas.get(0); @@ -521,10 +521,10 @@ public class TribeIT extends ESIntegTestCase { public void testMergingMultipleCustomMetaData() throws Exception { removeCustomMetaData(cluster1, MergableCustomMetaData1.TYPE); removeCustomMetaData(cluster2, MergableCustomMetaData1.TYPE); - MergableCustomMetaData1 firstCustomMetaDataType1 = new MergableCustomMetaData1(randomAsciiOfLength(10)); - MergableCustomMetaData1 secondCustomMetaDataType1 = new MergableCustomMetaData1(randomAsciiOfLength(10)); - MergableCustomMetaData2 firstCustomMetaDataType2 = new MergableCustomMetaData2(randomAsciiOfLength(10)); - MergableCustomMetaData2 secondCustomMetaDataType2 = new MergableCustomMetaData2(randomAsciiOfLength(10)); + MergableCustomMetaData1 firstCustomMetaDataType1 = new MergableCustomMetaData1(randomAlphaOfLength(10)); + MergableCustomMetaData1 secondCustomMetaDataType1 = new MergableCustomMetaData1(randomAlphaOfLength(10)); + MergableCustomMetaData2 firstCustomMetaDataType2 = new MergableCustomMetaData2(randomAlphaOfLength(10)); + MergableCustomMetaData2 secondCustomMetaDataType2 = new MergableCustomMetaData2(randomAlphaOfLength(10)); List mergedCustomMetaDataType1 = Arrays.asList(firstCustomMetaDataType1, secondCustomMetaDataType1); List mergedCustomMetaDataType2 = Arrays.asList(firstCustomMetaDataType2, secondCustomMetaDataType2); Collections.sort(mergedCustomMetaDataType1, (cm1, cm2) -> cm2.getData().compareTo(cm1.getData())); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java index beab15165d6..ae232c2c687 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java @@ -51,9 +51,9 @@ public class UpdateNoopIT extends ESIntegTestCase { public void testTwoFields() throws Exception { // Use random keys so we get random iteration order. - String key1 = 1 + randomAsciiOfLength(3); - String key2 = 2 + randomAsciiOfLength(3); - String key3 = 3 + randomAsciiOfLength(3); + String key1 = 1 + randomAlphaOfLength(3); + String key2 = 2 + randomAlphaOfLength(3); + String key3 = 3 + randomAlphaOfLength(3); updateAndCheckSource(1, fields(key1, "foo", key2, "baz")); updateAndCheckSource(1, fields(key1, "foo", key2, "baz")); updateAndCheckSource(2, fields(key1, "foo", key2, "bir")); @@ -90,9 +90,9 @@ public class UpdateNoopIT extends ESIntegTestCase { public void testMap() throws Exception { // Use random keys so we get variable iteration order. 
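// (the numeric prefixes 1, 2 and 3 also guarantee the three keys stay distinct even if the random suffixes collide)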
- String key1 = 1 + randomAsciiOfLength(3); - String key2 = 2 + randomAsciiOfLength(3); - String key3 = 3 + randomAsciiOfLength(3); + String key1 = 1 + randomAlphaOfLength(3); + String key2 = 2 + randomAlphaOfLength(3); + String key3 = 3 + randomAlphaOfLength(3); updateAndCheckSource(1, XContentFactory.jsonBuilder().startObject() .startObject("test") .field(key1, "foo") diff --git a/core/src/test/resources/indices/bwc/index-5.3.0.zip b/core/src/test/resources/indices/bwc/index-5.3.0.zip new file mode 100644 index 00000000000..091f7c1067c Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-5.3.0.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-5.3.0.zip b/core/src/test/resources/indices/bwc/repo-5.3.0.zip new file mode 100644 index 00000000000..81a31dcb2a4 Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-5.3.0.zip differ diff --git a/distribution/build.gradle b/distribution/build.gradle index e95ccf09320..2a8094eb800 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -40,7 +40,8 @@ buildscript { } } -Collection distributions = project.subprojects.findAll { it.path.contains(':tools') == false } +Collection distributions = project.subprojects.findAll { + it.path.contains(':tools') == false && it.name != 'bwc-zip' } /***************************************************************************** * Notice file * @@ -82,7 +83,7 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each } // We would like to make sure integ tests for the distribution run after // integ tests for the modules included in the distribution. - project.configure(distributions.findAll { it.name != 'integ-test-zip' }) { Project distribution -> + project.configure(distributions.findAll { it.name != 'integ-test-zip'}) { Project distribution -> distribution.afterEvaluate({ // some integTest tasks will have multiple finalizers distribution.integTest.mustRunAfter module.tasks.find { t -> t.name.matches(".*integTest\$") } diff --git a/distribution/bwc-zip/build.gradle b/distribution/bwc-zip/build.gradle new file mode 100644 index 00000000000..4370fae11fd --- /dev/null +++ b/distribution/bwc-zip/build.gradle @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.util.regex.Matcher +import org.elasticsearch.gradle.LoggedExec + +/** + * This is a dummy project which does a local worktree checkout of the previous + * major version's stable branch, and builds a snapshot. This allows backcompat + * tests in the next major version to test against the next unreleased minor + * version, without relying on snapshots. 
+ */ + +apply plugin: 'distribution' + +File checkoutDir = file("${buildDir}/bwc/checkout-5.x") +task createClone(type: LoggedExec) { + onlyIf { checkoutDir.exists() == false } + commandLine = ['git', 'clone', rootDir, checkoutDir] +} + +// we use regular Exec here to ensure we always get output, regardless of logging level +task findUpstream(type: Exec) { + dependsOn createClone + workingDir = checkoutDir + commandLine = ['git', 'remote', '-v'] + ignoreExitValue = true + ByteArrayOutputStream output = new ByteArrayOutputStream() + standardOutput = output + doLast { + if (execResult.exitValue != 0) { + output.toString('UTF-8').eachLine { line -> logger.error(line) } + execResult.assertNormalExitValue() + } + project.ext.upstreamExists = false + output.toString('UTF-8').eachLine { + if (it.contains("upstream")) { + project.ext.upstreamExists = true + } + } + } +} + +task addUpstream(type: LoggedExec) { + dependsOn findUpstream + onlyIf { project.ext.upstreamExists == false } + workingDir = checkoutDir + commandLine = ['git', 'remote', 'add', 'upstream', 'https://github.com/elastic/elasticsearch.git'] +} + +task fetchLatest(type: LoggedExec) { + dependsOn addUpstream + workingDir = checkoutDir + commandLine = ['git', 'fetch', 'upstream'] +} + +task checkoutBwcBranch(type: LoggedExec) { + dependsOn fetchLatest + workingDir = checkoutDir + commandLine = ['git', 'checkout', 'upstream/5.x'] +} + +File bwcZip = file("${checkoutDir}/distribution/zip/build/distributions/elasticsearch-${bwcVersion}.zip") +task buildBwcVersion(type: GradleBuild) { + dependsOn checkoutBwcBranch + dir = checkoutDir + tasks = [':distribution:zip:assemble'] +} + +artifacts { + 'default' file: bwcZip, name: 'elasticsearch', type: 'zip', builtBy: buildBwcVersion +} + diff --git a/docs/build.gradle b/docs/build.gradle index f8e5ff0dc25..aa24a3ac8a7 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -24,8 +24,6 @@ apply plugin: 'elasticsearch.docs-test' * only remove entries from this list. When it is empty we'll remove it * entirely and have a party! There will be cake and everything.... 
*/ buildRestTests.expectedUnconvertedCandidates = [ - 'reference/aggregations/bucket/geodistance-aggregation.asciidoc', - 'reference/aggregations/bucket/geohashgrid-aggregation.asciidoc', 'reference/aggregations/bucket/iprange-aggregation.asciidoc', 'reference/aggregations/bucket/missing-aggregation.asciidoc', 'reference/aggregations/bucket/nested-aggregation.asciidoc', @@ -36,8 +34,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'reference/aggregations/matrix/stats-aggregation.asciidoc', 'reference/aggregations/metrics/cardinality-aggregation.asciidoc', 'reference/aggregations/metrics/extendedstats-aggregation.asciidoc', - 'reference/aggregations/metrics/geobounds-aggregation.asciidoc', - 'reference/aggregations/metrics/geocentroid-aggregation.asciidoc', 'reference/aggregations/metrics/percentile-aggregation.asciidoc', 'reference/aggregations/metrics/percentile-rank-aggregation.asciidoc', 'reference/aggregations/metrics/scripted-metric-aggregation.asciidoc', @@ -57,22 +53,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'reference/aggregations/pipeline/serial-diff-aggregation.asciidoc', 'reference/aggregations/pipeline/stats-bucket-aggregation.asciidoc', 'reference/aggregations/pipeline/sum-bucket-aggregation.asciidoc', - 'reference/analysis/analyzers/lang-analyzer.asciidoc', - 'reference/analysis/analyzers/pattern-analyzer.asciidoc', - 'reference/analysis/charfilters/htmlstrip-charfilter.asciidoc', - 'reference/analysis/charfilters/pattern-replace-charfilter.asciidoc', - 'reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/elision-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc', - 'reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc', 'reference/cat/snapshots.asciidoc', 'reference/cat/templates.asciidoc', 'reference/cat/thread_pool.asciidoc', @@ -103,18 +83,13 @@ buildRestTests.expectedUnconvertedCandidates = [ 'reference/mapping/fields/all-field.asciidoc', 'reference/mapping/params/analyzer.asciidoc', 'reference/mapping/types/binary.asciidoc', - 'reference/mapping/types/geo-point.asciidoc', - 'reference/mapping/types/geo-shape.asciidoc', 'reference/mapping/types/ip.asciidoc', 'reference/mapping/types/nested.asciidoc', 'reference/mapping/types/object.asciidoc', 'reference/mapping/types/percolator.asciidoc', 'reference/modules/scripting/security.asciidoc', - 'reference/modules/scripting/using.asciidoc', 'reference/modules/cross-cluster-search.asciidoc', // this is hard to test since we need 2 clusters -- maybe we can trick it into referencing itself... 
- 'reference/query-dsl/exists-query.asciidoc', 'reference/query-dsl/function-score-query.asciidoc', - 'reference/query-dsl/geo-shape-query.asciidoc', 'reference/search/field-stats.asciidoc', 'reference/search/profile.asciidoc', 'reference/search/request/highlighting.asciidoc', @@ -131,15 +106,20 @@ integTestCluster { Closure configFile = { extraConfigFile it, "src/test/cluster/config/$it" } + configFile 'scripts/calculate_score.painless' configFile 'scripts/my_script.painless' configFile 'scripts/my_init_script.painless' configFile 'scripts/my_map_script.painless' configFile 'scripts/my_combine_script.painless' configFile 'scripts/my_reduce_script.painless' + configFile 'analysis/example_word_list.txt' + configFile 'analysis/hyphenation_patterns.xml' configFile 'analysis/synonym.txt' configFile 'analysis/stemmer_override.txt' configFile 'userdict_ja.txt' configFile 'KeywordTokenizer.rbbi' + extraConfigFile 'hunspell/en_US/en_US.aff', '../core/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.aff' + extraConfigFile 'hunspell/en_US/en_US.dic', '../core/src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.dic' // Whitelist reindexing from the local node so we can test it. setting 'reindex.remote.whitelist', '127.0.0.1:*' } diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index e57a452ba52..9eb6cae1169 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -187,8 +187,8 @@ Or on Windows: [source,shell] ------------------------------------ -set ES_JAVA_OPTS="-DproxyHost=host_name -DproxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number" -bin/elasticsearch-plugin install analysis-icu +set ES_JAVA_OPTS="-Dhttp.proxyHost=host_name -Dhttp.proxyPort=port_number -Dhttps.proxyHost=host_name -Dhttps.proxyPort=https_port_number" +bin\elasticsearch-plugin install analysis-icu ------------------------------------ === Plugins directory diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index e6d8c682ba7..ab2f9dd05e9 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -72,6 +72,12 @@ It's not set by default which means that elasticsearch is using the http://azure.github.io/azure-storage-java/com/microsoft/azure/storage/RequestOptions.html#setTimeoutIntervalInMs(java.lang.Integer)[default value] set by the azure client (known as 5 minutes). +`max_retries` controls the exponential backoff policy. It sets the number of retries +to attempt in case of failures before considering the snapshot as failed. Defaults to `3` retries. +The initial backoff period is defined by the Azure SDK as `30s`, which means `30s` of wait time +before retrying after a first timeout or failure. The maximum backoff period is defined by the Azure SDK +as `90s`. + [source,yaml] ---- cloud: @@ -82,13 +88,15 @@ cloud: account: your_azure_storage_account1 key: your_azure_storage_key1 default: true + max_retries: 7 my_account2: account: your_azure_storage_account2 key: your_azure_storage_key2 timeout: 30s ---- -In this example, timeout will be 10s for `my_account1` and 30s for `my_account2`. +In this example, the timeout will be `10s` per try for `my_account1` with `7` retries before failing, +and `30s` per try for `my_account2` with `3` retries.
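A snapshot repository picks up these client-side values through the storage account it references. As a minimal, illustrative sketch (the repository name `my_backup` is hypothetical, and the repository-level `account` setting is the one described in the repository settings section below), a repository backed by `my_account1` would inherit its `10s` timeout and `7` retries:

[source,js]
----
PUT _snapshot/my_backup
{
  "type": "azure",
  "settings": {
    "account": "my_account1"
  }
}
----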
[[repository-azure-repository-settings]] ===== Repository settings @@ -185,22 +193,6 @@ client.admin().cluster().preparePutRepository("my_backup_java1") ).get(); ---- -[[repository-azure-global-settings]] -===== Global repositories settings - -All those repository settings can also be defined globally in `elasticsearch.yml` file using prefix -`repositories.azure.`. For example: - -[source,yaml] ----- -repositories.azure: - container: backup-container - base_path: backups - chunk_size: 32m - compress": true ----- - - [[repository-azure-validation]] ===== Repository validation rules diff --git a/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc b/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc index a051fc00c4f..d3d13d4ac6f 100644 --- a/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc @@ -5,6 +5,34 @@ A multi-bucket aggregation that works on `geo_point` fields and conceptually wor [source,js] -------------------------------------------------- +PUT /museums +{ + "mappings": { + "doc": { + "properties": { + "location": { + "type": "geo_point" + } + } + } + } +} + +POST /museums/doc/_bulk?refresh +{"index":{"_id":1}} +{"location": "52.374081,4.912350", "name": "NEMO Science Museum"} +{"index":{"_id":2}} +{"location": "52.369219,4.901618", "name": "Museum Het Rembrandthuis"} +{"index":{"_id":3}} +{"location": "52.371667,4.914722", "name": "Nederlands Scheepvaartmuseum"} +{"index":{"_id":4}} +{"location": "51.222900,4.405200", "name": "Letterenhuis"} +{"index":{"_id":5}} +{"location": "48.861111,2.336389", "name": "Musée du Louvre"} +{"index":{"_id":6}} +{"location": "48.860000,2.327000", "name": "Musée d'Orsay"} + +POST /museums/_search?size=0 { "aggs" : { "rings_around_amsterdam" : { @@ -12,46 +40,49 @@ A multi-bucket aggregation that works on `geo_point` fields and conceptually wor "field" : "location", "origin" : "52.3760, 4.894", "ranges" : [ - { "to" : 100 }, - { "from" : 100, "to" : 300 }, - { "from" : 300 } + { "to" : 100000 }, + { "from" : 100000, "to" : 300000 }, + { "from" : 300000 } ] } } } } -------------------------------------------------- +// CONSOLE Response: [source,js] -------------------------------------------------- { + ... "aggregations": { - "rings" : { + "rings_around_amsterdam" : { "buckets": [ { - "key": "*-100.0", - "from": 0, - "to": 100.0, + "key": "*-100000.0", + "from": 0.0, + "to": 100000.0, "doc_count": 3 }, { - "key": "100.0-300.0", - "from": 100.0, - "to": 300.0, + "key": "100000.0-300000.0", + "from": 100000.0, + "to": 300000.0, "doc_count": 1 }, { - "key": "300.0-*", - "from": 300.0, - "doc_count": 7 + "key": "300000.0-*", + "from": 300000.0, + "doc_count": 2 } ] } } } -------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] The specified field must be of type `geo_point` (which can only be set explicitly in the mappings). And it can also hold an array of `geo_point` fields, in which case all will be taken into account during aggregation. 
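Each distance ring is a regular bucket and can hold sub-aggregations. As an illustrative sketch (not part of the original page), the following reuses the museums data and computes a `geo_centroid` for the documents that fall in each ring:

[source,js]
----
POST /museums/_search?size=0
{
    "aggs" : {
        "rings_around_amsterdam" : {
            "geo_distance" : {
                "field" : "location",
                "origin" : "52.3760, 4.894",
                "ranges" : [
                    { "to" : 100000 },
                    { "from" : 100000 }
                ]
            },
            "aggs" : {
                "ring_centroid" : {
                    "geo_centroid" : { "field" : "location" }
                }
            }
        }
    }
}
----
// CONSOLE
// TEST[continued]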
The origin point can accept all formats supported by the <>: @@ -59,17 +90,18 @@ The specified field must be of type `geo_point` (which can only be set explicitl * String format: `"52.3760, 4.894"` - where the first number is the `lat` and the second is the `lon` * Array format: `[4.894, 52.3760]` - which is based on the `GeoJson` standard and where the first number is the `lon` and the second one is the `lat` -By default, the distance unit is `m` (metres) but it can also accept: `mi` (miles), `in` (inches), `yd` (yards), `km` (kilometers), `cm` (centimeters), `mm` (millimeters). +By default, the distance unit is `m` (meters) but it can also accept: `mi` (miles), `in` (inches), `yd` (yards), `km` (kilometers), `cm` (centimeters), `mm` (millimeters). [source,js] -------------------------------------------------- +POST /museums/_search?size=0 { "aggs" : { "rings" : { "geo_distance" : { "field" : "location", "origin" : "52.3760, 4.894", - "unit" : "mi", <1> + "unit" : "km", <1> "ranges" : [ { "to" : 100 }, { "from" : 100, "to" : 300 }, @@ -80,19 +112,23 @@ By default, the distance unit is `m` (metres) but it can also accept: `mi` (mile } } -------------------------------------------------- +// CONSOLE +// TEST[continued] -<1> The distances will be computed as miles +<1> The distances will be computed in kilometers There are two distance calculation modes: `arc` (the default), and `plane`. The `arc` calculation is the most accurate. The `plane` is the fastest but least accurate. Consider using `plane` when your search context is "narrow", and spans smaller geographical areas (~5km). `plane` will return higher error margins for searches across very large areas (e.g. cross continent search). The distance calculation type can be set using the `distance_type` parameter: [source,js] -------------------------------------------------- +POST /museums/_search?size=0 { "aggs" : { "rings" : { "geo_distance" : { "field" : "location", "origin" : "52.3760, 4.894", + "unit" : "km", "distance_type" : "plane", "ranges" : [ { "to" : 100 }, @@ -104,3 +140,5 @@ There are two distance calculation modes: `arc` (the default), and `plane`. 
The } } -------------------------------------------------- +// CONSOLE +// TEST[continued] diff --git a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc index 17ca509e3c5..84f70185aa9 100644 --- a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc @@ -19,9 +19,37 @@ The specified field must be of type `geo_point` (which can only be set explicitl [source,js] -------------------------------------------------- +PUT /museums +{ + "mappings": { + "doc": { + "properties": { + "location": { + "type": "geo_point" + } + } + } + } +} + +POST /museums/doc/_bulk?refresh +{"index":{"_id":1}} +{"location": "52.374081,4.912350", "name": "NEMO Science Museum"} +{"index":{"_id":2}} +{"location": "52.369219,4.901618", "name": "Museum Het Rembrandthuis"} +{"index":{"_id":3}} +{"location": "52.371667,4.914722", "name": "Nederlands Scheepvaartmuseum"} +{"index":{"_id":4}} +{"location": "51.222900,4.405200", "name": "Letterenhuis"} +{"index":{"_id":5}} +{"location": "48.861111,2.336389", "name": "Musée du Louvre"} +{"index":{"_id":6}} +{"location": "48.860000,2.327000", "name": "Musée d'Orsay"} + +POST /museums/_search?size=0 { "aggregations" : { - "myLarge-GrainGeoHashGrid" : { + "large-grid" : { "geohash_grid" : { "field" : "location", "precision" : 3 @@ -30,30 +58,35 @@ The specified field must be of type `geo_point` (which can only be set explicitl } } -------------------------------------------------- +// CONSOLE Response: [source,js] -------------------------------------------------- { + ... "aggregations": { - "myLarge-GrainGeoHashGrid": { + "large-grid": { "buckets": [ { - "key": "svz", - "doc_count": 10964 + "key": "u17", + "doc_count": 3 }, { - "key": "sv8", - "doc_count": 3198 + "key": "u09", + "doc_count": 2 + }, + { + "key": "u15", + "doc_count": 1 } ] } } } -------------------------------------------------- - - +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] ==== High-precision requests @@ -61,29 +94,32 @@ When requesting detailed buckets (typically for displaying a "zoomed in" map) a [source,js] -------------------------------------------------- +POST /museums/_search?size=0 { "aggregations" : { - "zoomedInView" : { + "zoomed-in" : { "filter" : { "geo_bounding_box" : { "location" : { - "top_left" : "51.73, 0.9", - "bottom_right" : "51.55, 1.1" + "top_left" : "52.4, 4.9", + "bottom_right" : "52.3, 5.0" } } }, "aggregations":{ "zoom1":{ "geohash_grid" : { - "field":"location", - "precision":8 + "field": "location", + "precision": 8 } } } } } - } +} -------------------------------------------------- +// CONSOLE +// TEST[continued] ==== Cell dimensions at the equator The table below shows the metric dimensions for cells covered by various string lengths of geohash. 
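Besides `precision`, the `geohash_grid` aggregation accepts a `size` option that caps the number of cells returned (the most populated cells first), which keeps responses small when a high precision would otherwise produce many buckets. An illustrative sketch against the same museums data:

[source,js]
----
POST /museums/_search?size=0
{
    "aggregations" : {
        "two-largest-cells" : {
            "geohash_grid" : {
                "field" : "location",
                "precision" : 3,
                "size" : 2
            }
        }
    }
}
----
// CONSOLE
// TEST[continued]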
diff --git a/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc b/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc index ade59477ee3..4d78e0c3082 100644 --- a/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/geobounds-aggregation.asciidoc @@ -8,9 +8,37 @@ Example: [source,js] -------------------------------------------------- +PUT /museums +{ + "mappings": { + "doc": { + "properties": { + "location": { + "type": "geo_point" + } + } + } + } +} + +POST /museums/doc/_bulk?refresh +{"index":{"_id":1}} +{"location": "52.374081,4.912350", "name": "NEMO Science Museum"} +{"index":{"_id":2}} +{"location": "52.369219,4.901618", "name": "Museum Het Rembrandthuis"} +{"index":{"_id":3}} +{"location": "52.371667,4.914722", "name": "Nederlands Scheepvaartmuseum"} +{"index":{"_id":4}} +{"location": "51.222900,4.405200", "name": "Letterenhuis"} +{"index":{"_id":5}} +{"location": "48.861111,2.336389", "name": "Musée du Louvre"} +{"index":{"_id":6}} +{"location": "48.860000,2.327000", "name": "Musée d'Orsay"} + +POST /museums/_search?size=0 { "query" : { - "match" : { "business_type" : "shop" } + "match" : { "name" : "musée" } }, "aggs" : { "viewport" : { @@ -22,6 +50,7 @@ Example: } } -------------------------------------------------- +// CONSOLE <1> The `geo_bounds` aggregation specifies the field to use to obtain the bounds <2> `wrap_longitude` is an optional parameter which specifies whether the bounding box should be allowed to overlap the international date line. The default value is `true` @@ -34,20 +63,20 @@ The response for the above aggregation: -------------------------------------------------- { ... - "aggregations": { "viewport": { "bounds": { "top_left": { - "lat": 80.45, - "lon": -160.22 + "lat": 48.86111099738628, + "lon": 2.3269999679178 }, "bottom_right": { - "lat": 40.65, - "lon": 42.57 + "lat": 48.85999997612089, + "lon": 2.3363889567553997 } } } } } -------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] diff --git a/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc b/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc index 8f871dc8dbc..89aa091bba2 100644 --- a/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc @@ -3,15 +3,39 @@ A metric aggregation that computes the weighted centroid from all coordinate values for a <> field. 
- Example: [source,js] -------------------------------------------------- +PUT /museums +{ + "mappings": { + "doc": { + "properties": { + "location": { + "type": "geo_point" + } + } + } + } +} + +POST /museums/doc/_bulk?refresh +{"index":{"_id":1}} +{"location": "52.374081,4.912350", "city": "Amsterdam", "name": "NEMO Science Museum"} +{"index":{"_id":2}} +{"location": "52.369219,4.901618", "city": "Amsterdam", "name": "Museum Het Rembrandthuis"} +{"index":{"_id":3}} +{"location": "52.371667,4.914722", "city": "Amsterdam", "name": "Nederlands Scheepvaartmuseum"} +{"index":{"_id":4}} +{"location": "51.222900,4.405200", "city": "Antwerp", "name": "Letterenhuis"} +{"index":{"_id":5}} +{"location": "48.861111,2.336389", "city": "Paris", "name": "Musée du Louvre"} +{"index":{"_id":6}} +{"location": "48.860000,2.327000", "city": "Paris", "name": "Musée d'Orsay"} + +POST /museums/_search?size=0 { - "query" : { - "match" : { "crime" : "burglary" } - }, "aggs" : { "centroid" : { "geo_centroid" : { @@ -21,6 +45,7 @@ Example: } } -------------------------------------------------- +// CONSOLE <1> The `geo_centroid` aggregation specifies the field to use for computing the centroid. (NOTE: field must be a <> type) @@ -32,18 +57,17 @@ The response for the above aggregation: -------------------------------------------------- { ... - "aggregations": { "centroid": { "location": { - "lat": 80.45, - "lon": -160.22 + "lat": 51.009829603135586, + "lon": 3.966213036328554 } } } } -------------------------------------------------- - +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] The `geo_centroid` aggregation is more interesting when combined as a sub-aggregation to other bucket aggregations. @@ -51,13 +75,11 @@ Example: [source,js] -------------------------------------------------- +POST /museums/_search?size=0 { - "query" : { - "match" : { "crime" : "burglary" } - }, "aggs" : { - "towns" : { - "terms" : { "field" : "town" }, + "cities" : { + "terms" : { "field" : "city.keyword" }, "aggs" : { "centroid" : { "geo_centroid" : { "field" : "location" } @@ -67,9 +89,12 @@ Example: } } -------------------------------------------------- +// CONSOLE +// TEST[continued] -The above example uses `geo_centroid` as a sub-aggregation to a <> bucket aggregation -for finding the central location for all crimes of type burglary in each town. +The above example uses `geo_centroid` as a sub-aggregation to a +<> bucket aggregation +for finding the central location for museums in each city. The response for the above aggregation: @@ -77,28 +102,44 @@ The response for the above aggregation: -------------------------------------------------- { ... 
- - "buckets": [ - { - "key": "Los Altos", - "doc_count": 113, - "centroid": { - "location": { - "lat": 37.3924582824111, - "lon": -122.12104808539152 - } - } - }, - { - "key": "Mountain View", - "doc_count": 92, - "centroid": { - "location": { - "lat": 37.382152481004596, - "lon": -122.08116559311748 - } - } + "aggregations": { + "cities": { + "sum_other_doc_count": 0, + "doc_count_error_upper_bound": 0, + "buckets": [ + { + "key": "Amsterdam", + "doc_count": 3, + "centroid": { + "location": { + "lat": 52.371655656024814, + "lon": 4.909563269466162 + } + } + }, + { + "key": "Paris", + "doc_count": 2, + "centroid": { + "location": { + "lat": 48.86055544484407, + "lon": 2.331694420427084 + } + } + }, + { + "key": "Antwerp", + "doc_count": 1, + "centroid": { + "location": { + "lat": 51.222899928689, + "lon": 4.405199903994799 + } + } + } + ] } - ] + } } --------------------------------------------------- \ No newline at end of file +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 02843789c4b..65cc30780b1 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -77,6 +77,7 @@ The `arabic` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /arabic_example { "settings": { "analysis": { @@ -87,7 +88,7 @@ The `arabic` analyzer could be reimplemented as a `custom` analyzer as follows: }, "arabic_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["مثال"] <2> }, "arabic_stemmer": { "type": "stemmer", @@ -110,6 +111,7 @@ The `arabic` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -122,6 +124,7 @@ The `armenian` analyzer could be reimplemented as a `custom` analyzer as follows [source,js] ---------------------------------------------------- +PUT /armenian_example { "settings": { "analysis": { @@ -132,7 +135,7 @@ The `armenian` analyzer could be reimplemented as a `custom` analyzer as follows }, "armenian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["օրինակ"] <2> }, "armenian_stemmer": { "type": "stemmer", @@ -154,6 +157,7 @@ The `armenian` analyzer could be reimplemented as a `custom` analyzer as follows } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. 
<2> This filter should be removed unless there are words which should @@ -166,6 +170,7 @@ The `basque` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /basque_example { "settings": { "analysis": { @@ -176,7 +181,7 @@ The `basque` analyzer could be reimplemented as a `custom` analyzer as follows: }, "basque_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["Adibidez"] <2> }, "basque_stemmer": { "type": "stemmer", @@ -198,6 +203,7 @@ The `basque` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -210,6 +216,7 @@ The `brazilian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- +PUT /brazilian_example { "settings": { "analysis": { @@ -220,7 +227,7 @@ The `brazilian` analyzer could be reimplemented as a `custom` analyzer as follow }, "brazilian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["exemplo"] <2> }, "brazilian_stemmer": { "type": "stemmer", @@ -242,6 +249,7 @@ The `brazilian` analyzer could be reimplemented as a `custom` analyzer as follow } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -254,6 +262,7 @@ The `bulgarian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- +PUT /bulgarian_example { "settings": { "analysis": { @@ -264,7 +273,7 @@ The `bulgarian` analyzer could be reimplemented as a `custom` analyzer as follow }, "bulgarian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["пример"] <2> }, "bulgarian_stemmer": { "type": "stemmer", @@ -286,6 +295,7 @@ The `bulgarian` analyzer could be reimplemented as a `custom` analyzer as follow } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -298,6 +308,7 @@ The `catalan` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /catalan_example { "settings": { "analysis": { @@ -312,7 +323,7 @@ The `catalan` analyzer could be reimplemented as a `custom` analyzer as follows: }, "catalan_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["exemple"] <2> }, "catalan_stemmer": { "type": "stemmer", @@ -335,6 +346,7 @@ The `catalan` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters.
<2> This filter should be removed unless there are words which should @@ -347,6 +359,7 @@ The `cjk` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /cjk_example { "settings": { "analysis": { @@ -371,6 +384,7 @@ The `cjk` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. @@ -381,6 +395,7 @@ The `czech` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /czech_example { "settings": { "analysis": { @@ -391,7 +406,7 @@ The `czech` analyzer could be reimplemented as a `custom` analyzer as follows: }, "czech_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["příklad"] <2> }, "czech_stemmer": { "type": "stemmer", @@ -413,6 +428,7 @@ The `czech` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -425,6 +441,7 @@ The `danish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /danish_example { "settings": { "analysis": { @@ -435,7 +452,7 @@ The `danish` analyzer could be reimplemented as a `custom` analyzer as follows: }, "danish_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["eksempel"] <2> }, "danish_stemmer": { "type": "stemmer", @@ -457,6 +474,7 @@ The `danish` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -469,6 +487,7 @@ The `dutch` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /dutch_example { "settings": { "analysis": { @@ -479,7 +498,7 @@ The `dutch` analyzer could be reimplemented as a `custom` analyzer as follows: }, "dutch_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["voorbeeld"] <2> }, "dutch_stemmer": { "type": "stemmer", @@ -511,6 +530,7 @@ The `dutch` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters.
<2> This filter should be removed unless there are words which should @@ -523,6 +543,7 @@ The `english` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /english_example { "settings": { "analysis": { @@ -533,7 +554,7 @@ The `english` analyzer could be reimplemented as a `custom` analyzer as follows: }, "english_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["example"] <2> }, "english_stemmer": { "type": "stemmer", @@ -560,6 +581,7 @@ The `english` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -572,6 +594,7 @@ The `finnish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /finnish_example { "settings": { "analysis": { @@ -582,7 +605,7 @@ The `finnish` analyzer could be reimplemented as a `custom` analyzer as follows: }, "finnish_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["esimerkki"] <2> }, "finnish_stemmer": { "type": "stemmer", @@ -604,6 +627,7 @@ The `finnish` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -616,6 +640,7 @@ The `french` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /french_example { "settings": { "analysis": { @@ -635,7 +660,7 @@ The `french` analyzer could be reimplemented as a `custom` analyzer as follows: }, "french_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["Exemple"] <2> }, "french_stemmer": { "type": "stemmer", @@ -658,6 +683,7 @@ The `french` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -670,6 +696,7 @@ The `galician` analyzer could be reimplemented as a `custom` analyzer as follows [source,js] ---------------------------------------------------- +PUT /galician_example { "settings": { "analysis": { @@ -680,7 +707,7 @@ The `galician` analyzer could be reimplemented as a `custom` analyzer as follows }, "galician_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["exemplo"] <2> }, "galician_stemmer": { "type": "stemmer", @@ -702,6 +729,7 @@ The `galician` analyzer could be reimplemented as a `custom` analyzer as follows } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. 
<2> This filter should be removed unless there are words which should @@ -714,6 +742,7 @@ The `german` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /german_example { "settings": { "analysis": { @@ -724,7 +753,7 @@ The `german` analyzer could be reimplemented as a `custom` analyzer as follows: }, "german_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["Beispiel"] <2> }, "german_stemmer": { "type": "stemmer", @@ -747,6 +776,7 @@ The `german` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -759,6 +789,7 @@ The `greek` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /greek_example { "settings": { "analysis": { @@ -773,7 +804,7 @@ The `greek` analyzer could be reimplemented as a `custom` analyzer as follows: }, "greek_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["παράδειγμα"] <2> }, "greek_stemmer": { "type": "stemmer", @@ -795,6 +826,7 @@ The `greek` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -807,6 +839,7 @@ The `hindi` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /hindi_example { "settings": { "analysis": { @@ -817,7 +850,7 @@ The `hindi` analyzer could be reimplemented as a `custom` analyzer as follows: }, "hindi_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["उदाहरण"] <2> }, "hindi_stemmer": { "type": "stemmer", @@ -841,6 +874,7 @@ The `hindi` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -853,6 +887,7 @@ The `hungarian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- +PUT /hungarian_example { "settings": { "analysis": { @@ -863,7 +898,7 @@ The `hungarian` analyzer could be reimplemented as a `custom` analyzer as follow }, "hungarian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["példa"] <2> }, "hungarian_stemmer": { "type": "stemmer", @@ -885,6 +920,7 @@ The `hungarian` analyzer could be reimplemented as a `custom` analyzer as follow } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. 
<2> This filter should be removed unless there are words which should @@ -898,6 +934,7 @@ The `indonesian` analyzer could be reimplemented as a `custom` analyzer as follo [source,js] ---------------------------------------------------- +PUT /indonesian_example { "settings": { "analysis": { @@ -908,7 +945,7 @@ The `indonesian` analyzer could be reimplemented as a `custom` analyzer as follo }, "indonesian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["contoh"] <2> }, "indonesian_stemmer": { "type": "stemmer", @@ -930,6 +967,7 @@ The `indonesian` analyzer could be reimplemented as a `custom` analyzer as follo } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -942,6 +980,7 @@ The `irish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /irish_example { "settings": { "analysis": { @@ -960,7 +999,7 @@ The `irish` analyzer could be reimplemented as a `custom` analyzer as follows: }, "irish_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["sampla"] <2> }, "irish_stemmer": { "type": "stemmer", @@ -983,6 +1022,7 @@ The `irish` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -995,6 +1035,7 @@ The `italian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /italian_example { "settings": { "analysis": { @@ -1014,7 +1055,7 @@ The `italian` analyzer could be reimplemented as a `custom` analyzer as follows: }, "italian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["esempio"] <2> }, "italian_stemmer": { "type": "stemmer", @@ -1037,6 +1078,7 @@ The `italian` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1049,6 +1091,7 @@ The `latvian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /latvian_example { "settings": { "analysis": { @@ -1059,7 +1102,7 @@ The `latvian` analyzer could be reimplemented as a `custom` analyzer as follows: }, "latvian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["piemērs"] <2> }, "latvian_stemmer": { "type": "stemmer", @@ -1081,6 +1124,7 @@ The `latvian` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. 
<2> This filter should be removed unless there are words which should @@ -1093,6 +1137,7 @@ The `lithuanian` analyzer could be reimplemented as a `custom` analyzer as follo [source,js] ---------------------------------------------------- +PUT /lithuanian_example { "settings": { "analysis": { @@ -1103,7 +1148,7 @@ The `lithuanian` analyzer could be reimplemented as a `custom` analyzer as follo }, "lithuanian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["pavyzdys"] <2> }, "lithuanian_stemmer": { "type": "stemmer", @@ -1125,6 +1170,7 @@ The `lithuanian` analyzer could be reimplemented as a `custom` analyzer as follo } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1137,6 +1183,7 @@ The `norwegian` analyzer could be reimplemented as a `custom` analyzer as follow [source,js] ---------------------------------------------------- +PUT /norwegian_example { "settings": { "analysis": { @@ -1147,7 +1194,7 @@ The `norwegian` analyzer could be reimplemented as a `custom` analyzer as follow }, "norwegian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["eksempel"] <2> }, "norwegian_stemmer": { "type": "stemmer", @@ -1169,6 +1216,7 @@ The `norwegian` analyzer could be reimplemented as a `custom` analyzer as follow } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1181,6 +1229,7 @@ The `persian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /persian_example { "settings": { "analysis": { @@ -1212,6 +1261,7 @@ The `persian` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> Replaces zero-width non-joiners with an ASCII space. <2> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. @@ -1223,6 +1273,7 @@ The `portuguese` analyzer could be reimplemented as a `custom` analyzer as follo [source,js] ---------------------------------------------------- +PUT /portuguese_example { "settings": { "analysis": { @@ -1233,7 +1284,7 @@ The `portuguese` analyzer could be reimplemented as a `custom` analyzer as follo }, "portuguese_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["exemplo"] <2> }, "portuguese_stemmer": { "type": "stemmer", @@ -1255,6 +1306,7 @@ The `portuguese` analyzer could be reimplemented as a `custom` analyzer as follo } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. 
<2> This filter should be removed unless there are words which should @@ -1267,6 +1319,7 @@ The `romanian` analyzer could be reimplemented as a `custom` analyzer as follows [source,js] ---------------------------------------------------- +PUT /romanian_example { "settings": { "analysis": { @@ -1277,7 +1330,7 @@ The `romanian` analyzer could be reimplemented as a `custom` analyzer as follows }, "romanian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["exemplu"] <2> }, "romanian_stemmer": { "type": "stemmer", @@ -1299,6 +1352,7 @@ The `romanian` analyzer could be reimplemented as a `custom` analyzer as follows } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1312,6 +1366,7 @@ The `russian` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /russian_example { "settings": { "analysis": { @@ -1322,7 +1377,7 @@ The `russian` analyzer could be reimplemented as a `custom` analyzer as follows: }, "russian_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["пример"] <2> }, "russian_stemmer": { "type": "stemmer", @@ -1344,6 +1399,7 @@ The `russian` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1356,6 +1412,7 @@ The `sorani` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /sorani_example { "settings": { "analysis": { @@ -1366,7 +1423,7 @@ The `sorani` analyzer could be reimplemented as a `custom` analyzer as follows: }, "sorani_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["mînak"] <2> }, "sorani_stemmer": { "type": "stemmer", @@ -1389,6 +1446,7 @@ The `sorani` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1401,6 +1459,7 @@ The `spanish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /spanish_example { "settings": { "analysis": { @@ -1411,7 +1470,7 @@ The `spanish` analyzer could be reimplemented as a `custom` analyzer as follows: }, "spanish_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["ejemplo"] <2> }, "spanish_stemmer": { "type": "stemmer", @@ -1433,6 +1492,7 @@ The `spanish` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. 
<2> This filter should be removed unless there are words which should @@ -1445,6 +1505,7 @@ The `swedish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /swedish_example { "settings": { "analysis": { @@ -1455,7 +1516,7 @@ The `swedish` analyzer could be reimplemented as a `custom` analyzer as follows: }, "swedish_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["exempel"] <2> }, "swedish_stemmer": { "type": "stemmer", @@ -1477,6 +1538,7 @@ The `swedish` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1489,6 +1551,7 @@ The `turkish` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /turkish_example { "settings": { "analysis": { @@ -1503,7 +1566,7 @@ The `turkish` analyzer could be reimplemented as a `custom` analyzer as follows: }, "turkish_keywords": { "type": "keyword_marker", - "keywords": [] <2> + "keywords": ["örnek"] <2> }, "turkish_stemmer": { "type": "stemmer", @@ -1526,6 +1589,7 @@ The `turkish` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters. <2> This filter should be removed unless there are words which should @@ -1538,6 +1602,7 @@ The `thai` analyzer could be reimplemented as a `custom` analyzer as follows: [source,js] ---------------------------------------------------- +PUT /thai_example { "settings": { "analysis": { @@ -1560,5 +1625,6 @@ The `thai` analyzer could be reimplemented as a `custom` analyzer as follows: } } ---------------------------------------------------- +// CONSOLE <1> The default stopwords can be overridden with the `stopwords` or `stopwords_path` parameters.
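Each of the `*_example` indices created above can be exercised with the `_analyze` API to verify that the rebuilt chain behaves like the built-in analyzer. A sketch, assuming the custom analyzer in `english_example` is registered under the name `english` (the analyzer name sits in unchanged context lines that this diff does not show, so treat it as a placeholder):

[source,js]
----
POST /english_example/_analyze
{
  "analyzer" : "english",
  "text" : "The quick foxes are jumping"
}
----
// CONSOLE
// TEST[continued]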
diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 7d059253e70..64ab3999ef9 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -366,9 +366,8 @@ The above example produces the following terms: The regex above is easier to understand as: -[source,js] +[source,regex] -------------------------------------------------- - ([^\p{L}\d]+) # swallow non letters and numbers, | (?<=\D)(?=\d) # or non-number followed by number, | (?<=\d)(?=\D) # or number followed by non-number, diff --git a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc index 217b618c9c2..6c1a1875d67 100644 --- a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc @@ -43,14 +43,14 @@ POST _analyze The above example returns the term: -[source,js] +[source,text] --------------------------- [ \nI'm so happy!\n ] --------------------------- The same example with the `standard` tokenizer would return the following terms: -[source,js] +[source,text] --------------------------- [ I'm, so, happy ] --------------------------- diff --git a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc index 32ee14d8f55..6f21f4521d3 100644 --- a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc @@ -79,7 +79,9 @@ POST my_index/_analyze } ---------------------------- // CONSOLE -// TEST[skip:Test interprets $1 as a stashed variable] +// TEST[s/\$1//] +// the test framework doesn't like the $1 so we just throw it away rather than +// try to get it to work properly. At least we are still testing the charfilter. The above example produces the following term: @@ -88,7 +90,6 @@ The above example produces the following term: [ My, credit, card, is 123_456_789 ] --------------------------- - WARNING: Using a replacement string that changes the length of the original text will work for search purposes, but will result in incorrect highlighting, as can be seen in the following example. @@ -193,7 +194,7 @@ POST my_index/_analyze The above returns the following terms: -[source,js] +[source,text] ---------------------------- [ the, foo, bar, baz, method ] ---------------------------- diff --git a/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc index 68891c18e23..73d35549da8 100644 --- a/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc @@ -8,17 +8,21 @@ equivalents, if one exists. 
Example: [source,js] -------------------------------------------------- -"index" : { - "analysis" : { - "analyzer" : { - "default" : { - "tokenizer" : "standard", - "filter" : ["standard", "asciifolding"] +PUT /asciifold_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "default" : { + "tokenizer" : "standard", + "filter" : ["standard", "asciifolding"] + } } } } } -------------------------------------------------- +// CONSOLE Accepts `preserve_original` setting which defaults to false but if true will keep the original token as well as emit the folded token. For @@ -26,20 +30,24 @@ example: [source,js] -------------------------------------------------- -"index" : { - "analysis" : { - "analyzer" : { - "default" : { - "tokenizer" : "standard", - "filter" : ["standard", "my_ascii_folding"] - } - }, - "filter" : { - "my_ascii_folding" : { - "type" : "asciifolding", - "preserve_original" : true +PUT /asciifold_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "default" : { + "tokenizer" : "standard", + "filter" : ["standard", "my_ascii_folding"] + } + }, + "filter" : { + "my_ascii_folding" : { + "type" : "asciifolding", + "preserve_original" : true + } } } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc index c1e278b2183..cc26d025f04 100644 --- a/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/cjk-bigram-tokenfilter.asciidoc @@ -16,8 +16,9 @@ Bigrams are generated for characters in `han`, `hiragana`, `katakana` and [source,js] -------------------------------------------------- +PUT /cjk_bigram_example { - "index" : { + "settings" : { "analysis" : { "analyzer" : { "han_bigrams" : { @@ -40,3 +41,4 @@ Bigrams are generated for characters in `han`, `hiragana`, `katakana` and } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc index eb1469af803..c7d8ff660d3 100644 --- a/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/common-grams-tokenfilter.asciidoc @@ -41,21 +41,33 @@ Here is an example: [source,js] -------------------------------------------------- -index : - analysis : - analyzer : - index_grams : - tokenizer : whitespace - filter : [common_grams] - search_grams : - tokenizer : whitespace - filter : [common_grams_query] - filter : - common_grams : - type : common_grams - common_words: [a, an, the] - common_grams_query : - type : common_grams - query_mode: true - common_words: [a, an, the] +PUT /common_grams_example +{ + "settings": { + "analysis": { + "analyzer": { + "index_grams": { + "tokenizer": "whitespace", + "filter": ["common_grams"] + }, + "search_grams": { + "tokenizer": "whitespace", + "filter": ["common_grams_query"] + } + }, + "filter": { + "common_grams": { + "type": "common_grams", + "common_words": ["a", "an", "the"] + }, + "common_grams_query": { + "type": "common_grams", + "query_mode": true, + "common_words": ["a", "an", "the"] + } + } + } + } +} -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc index
1268727b2ef..e790ed4c4b5 100644 --- a/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/compound-word-tokenfilter.asciidoc @@ -1,5 +1,5 @@ [[analysis-compound-word-tokenfilter]] -=== Compound Word Token Filter +=== Compound Word Token Filters The `hyphenation_decompounder` and `dictionary_decompounder` token filters can decompose compound words found in many Germanic languages into word parts. @@ -26,7 +26,7 @@ output tokens is directly connected to the quality of the grammar file you use. For languages like German they are quite good. XML based hyphenation grammar files can be found in the -http://offo.sourceforge.net/hyphenation/#FOP+XML+Hyphenation+Patterns[Objects For Formatting Objects] +http://offo.sourceforge.net/#FOP+XML+Hyphenation+Patterns[Objects For Formatting Objects] (OFFO) Sourceforge project. Currently only FOP v1.2 compatible hyphenation files are supported. You can download https://sourceforge.net/projects/offo/files/offo-hyphenation/1.2/offo-hyphenation_v1.2.zip/download[offo-hyphenation_v1.2.zip] directly and look in the `offo-hyphenation/hyph/` directory. @@ -84,20 +84,31 @@ Here is an example: [source,js] -------------------------------------------------- -index : - analysis : - analyzer : - myAnalyzer2 : - type : custom - tokenizer : standard - filter : [myTokenFilter1, myTokenFilter2] - filter : - myTokenFilter1 : - type : dictionary_decompounder - word_list: [one, two, three] - myTokenFilter2 : - type : hyphenation_decompounder - word_list_path: path/to/words.txt - hyphenation_patterns_path: path/to/fop.xml - max_subword_size : 22 +PUT /compound_word_example +{ + "index": { + "analysis": { + "analyzer": { + "my_analyzer": { + "type": "custom", + "tokenizer": "standard", + "filter": ["dictionary_decompounder", "hyphenation_decompounder"] + } + }, + "filter": { + "dictionary_decompounder": { + "type": "dictionary_decompounder", + "word_list": ["one", "two", "three"] + }, + "hyphenation_decompounder": { + "type" : "hyphenation_decompounder", + "word_list_path": "analysis/example_word_list.txt", + "hyphenation_patterns_path": "analysis/hyphenation_patterns.xml", + "max_subword_size": 22 + } + } + } + } +} -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc index c44ccffd51e..956c5ad13d0 100644 --- a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc @@ -9,20 +9,24 @@ example: [source,js] -------------------------------------------------- -"index" : { - "analysis" : { - "analyzer" : { - "default" : { - "tokenizer" : "standard", - "filter" : ["standard", "elision"] - } - }, - "filter" : { - "elision" : { - "type" : "elision", - "articles" : ["l", "m", "t", "qu", "n", "s", "j"] +PUT /elision_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "default" : { + "tokenizer" : "standard", + "filter" : ["standard", "elision"] + } + }, + "filter" : { + "elision" : { + "type" : "elision", + "articles" : ["l", "m", "t", "qu", "n", "s", "j"] + } } } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc index 9b3f188d951..cef687f7619 100644 ---
a/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc @@ -10,7 +10,7 @@ one or more `*.dic` files (all of which will automatically be picked up). For example, assuming the default hunspell location is used, the following directory layout will define the `en_US` dictionary: -[source,js] +[source,txt] -------------------------------------------------- - conf |-- hunspell @@ -42,24 +42,28 @@ settings: [source,js] -------------------------------------------------- +PUT /hunspell_example { - "analysis" : { - "analyzer" : { - "en" : { - "tokenizer" : "standard", - "filter" : [ "lowercase", "en_US" ] - } - }, - "filter" : { - "en_US" : { - "type" : "hunspell", - "locale" : "en_US", - "dedup" : true + "settings": { + "analysis" : { + "analyzer" : { + "en" : { + "tokenizer" : "standard", + "filter" : [ "lowercase", "en_US" ] + } + }, + "filter" : { + "en_US" : { + "type" : "hunspell", + "locale" : "en_US", + "dedup" : true + } } } } } -------------------------------------------------- +// CONSOLE The hunspell token filter accepts four options: diff --git a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc index bb1103dff8a..afaf4f8fa8c 100644 --- a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc @@ -1,7 +1,7 @@ [[analysis-keep-types-tokenfilter]] === Keep Types Token Filter -A token filter of type `keep_types` that only keeps tokens with a token type +A token filter of type `keep_types` that only keeps tokens with a token type contained in a predefined set. @@ -14,24 +14,61 @@ types:: a list of types to keep [float] === Settings example +You can set it up like: + [source,js] -------------------------------------------------- +PUT /keep_types_example { - "index" : { + "settings" : { "analysis" : { "analyzer" : { "my_analyzer" : { "tokenizer" : "standard", "filter" : ["standard", "lowercase", "extract_numbers"] - }, + } }, "filter" : { "extract_numbers" : { "type" : "keep_types", "types" : [ "<NUM>" ] - }, + } } } } } -------------------------------------------------- +// CONSOLE + +And test it like: + +[source,js] +-------------------------------------------------- +POST /keep_types_example/_analyze +{ + "analyzer" : "my_analyzer", + "text" : "this is just 1 a test" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +And it'd respond: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "1", + "start_offset": 13, + "end_offset": 14, + "type": "<NUM>", + "position": 3 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE + +Note how only the `<NUM>` token is in the output.
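+
+A quick way to experiment with `keep_types` without creating an index first is
+to define the filter inline in an `_analyze` request. This is only a sketch
+(it assumes the inline custom-filter syntax that the `_analyze` API accepts)
+and is not wired into the documentation test suite:
+
+[source,js]
+--------------------------------------------------
+GET /_analyze
+{
+  "tokenizer" : "standard",
+  "filter" : [ "lowercase", { "type": "keep_types", "types": [ "<NUM>" ] } ],
+  "text" : "this is just 1 a test"
+}
+--------------------------------------------------
+// NOTCONSOLE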
diff --git a/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc index e4abbeff15d..50c74942a01 100644 --- a/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc @@ -20,17 +20,18 @@ keep_words_case:: a boolean indicating whether to lower case the words (defaults [source,js] -------------------------------------------------- +PUT /keep_words_example { - "index" : { + "settings" : { "analysis" : { "analyzer" : { - "my_analyzer" : { + "example_1" : { "tokenizer" : "standard", "filter" : ["standard", "lowercase", "words_till_three"] }, - "my_analyzer1" : { + "example_2" : { "tokenizer" : "standard", - "filter" : ["standard", "lowercase", "words_on_file"] + "filter" : ["standard", "lowercase", "words_in_file"] } }, "filter" : { @@ -38,12 +39,13 @@ keep_words_case:: a boolean indicating whether to lower case the words (defaults "type" : "keep", "keep_words" : [ "one", "two", "three"] }, - "words_on_file" : { + "words_in_file" : { "type" : "keep", - "keep_words_path" : "/path/to/word/file" + "keep_words_path" : "analysis/example_word_list.txt" } } } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc index e43687e29dc..1f1e4e655c5 100644 --- a/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keyword-marker-tokenfilter.asciidoc @@ -12,23 +12,131 @@ any stemming filters. |`keywords_path` |A path (either relative to `config` location, or absolute) to a list of words. +|`keywords_pattern` |A regular expression pattern to match against words +in the text. + |`ignore_case` |Set to `true` to lower case all words first. Defaults to `false`. 
|======================================================================= -Here is an example: +You can configure it like: [source,js] -------------------------------------------------- -index : - analysis : - analyzer : - myAnalyzer : - type : custom - tokenizer : standard - filter : [lowercase, protwords, porter_stem] - filter : - protwords : - type : keyword_marker - keywords_path : analysis/protwords.txt +PUT /keyword_marker_example +{ + "settings": { + "analysis": { + "analyzer": { + "protect_cats": { + "type": "custom", + "tokenizer": "standard", + "filter": ["lowercase", "protect_cats", "porter_stem"] + }, + "normal": { + "type": "custom", + "tokenizer": "standard", + "filter": ["lowercase", "porter_stem"] + } + }, + "filter": { + "protect_cats": { + "type": "keyword_marker", + "keywords": ["cats"] + } + } + } + } +} -------------------------------------------------- +// CONSOLE + +And test it with: + +[source,js] +-------------------------------------------------- +POST /keyword_marker_example/_analyze +{ + "analyzer" : "protect_cats", + "text" : "I like cats" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +And it'd respond: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "i", + "start_offset": 0, + "end_offset": 1, + "type": "<ALPHANUM>", + "position": 0 + }, + { + "token": "like", + "start_offset": 2, + "end_offset": 6, + "type": "<ALPHANUM>", + "position": 1 + }, + { + "token": "cats", + "start_offset": 7, + "end_offset": 11, + "type": "<ALPHANUM>", + "position": 2 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE + +As compared to the `normal` analyzer which has `cats` stemmed to `cat`: + +[source,js] +-------------------------------------------------- +POST /keyword_marker_example/_analyze +{ + "analyzer" : "normal", + "text" : "I like cats" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +Response: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "i", + "start_offset": 0, + "end_offset": 1, + "type": "<ALPHANUM>", + "position": 0 + }, + { + "token": "like", + "start_offset": 2, + "end_offset": 6, + "type": "<ALPHANUM>", + "position": 1 + }, + { + "token": "cat", + "start_offset": 7, + "end_offset": 11, + "type": "<ALPHANUM>", + "position": 2 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc index aa8c7a9b752..044e8c14769 100644 --- a/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keyword-repeat-tokenfilter.asciidoc @@ -9,18 +9,85 @@ subsequent stemmer will be indexed twice. Therefore, consider adding a `unique` filter with `only_on_same_position` set to `true` to drop unnecessary duplicates.
-Here is an example +Here is an example of using the `keyword_repeat` token filter to +preserve both the stemmed and unstemmed versions of tokens: [source,js] -------------------------------------------------- -index : - analysis : - analyzer : - myAnalyzer : - type : custom - tokenizer : standard - filter : [lowercase, keyword_repeat, porter_stem, unique_stem] - unique_stem: - type: unique - only_on_same_position : true +PUT /keyword_repeat_example +{ + "settings": { + "analysis": { + "analyzer": { + "stemmed_and_unstemmed": { + "type": "custom", + "tokenizer": "standard", + "filter": ["lowercase", "keyword_repeat", "porter_stem", "unique_stem"] + } + }, + "filter": { + "unique_stem": { + "type": "unique", + "only_on_same_position": true + } + } + } + } +} -------------------------------------------------- +// CONSOLE + +And you can test it with: + +[source,js] +-------------------------------------------------- +POST /keyword_repeat_example/_analyze +{ + "analyzer" : "stemmed_and_unstemmed", + "text" : "I like cats" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +And it'd respond: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "i", + "start_offset": 0, + "end_offset": 1, + "type": "<ALPHANUM>", + "position": 0 + }, + { + "token": "like", + "start_offset": 2, + "end_offset": 6, + "type": "<ALPHANUM>", + "position": 1 + }, + { + "token": "cats", + "start_offset": 7, + "end_offset": 11, + "type": "<ALPHANUM>", + "position": 2 + }, + { + "token": "cat", + "start_offset": 7, + "end_offset": 11, + "type": "<ALPHANUM>", + "position": 2 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE + +Which preserves both the `cat` and `cats` tokens. Compare this to the example +on the <<analysis-keyword-marker-tokenfilter,Keyword Marker Token Filter>>.
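+
+To see why the `unique` filter is worth adding, here is a sketch of the same
+chain with `unique_stem` left out (again assuming the inline filter syntax of
+the `_analyze` API; this request is illustrative and untested). Tokens that the
+stemmer leaves unchanged, such as `i` and `like`, come out twice at the same
+position:
+
+[source,js]
+--------------------------------------------------
+GET /_analyze
+{
+  "tokenizer" : "standard",
+  "filter" : [ "lowercase", "keyword_repeat", "porter_stem" ],
+  "text" : "I like cats"
+}
+--------------------------------------------------
+// NOTCONSOLE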
diff --git a/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc index a6598be6095..ba2018c1076 100644 --- a/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/limit-token-count-tokenfilter.asciidoc @@ -18,15 +18,25 @@ Here is an example: [source,js] -------------------------------------------------- -index : - analysis : - analyzer : - myAnalyzer : - type : custom - tokenizer : standard - filter : [lowercase, five_token_limit] - filter : - five_token_limit : - type : limit - max_token_count : 5 +PUT /limit_example +{ + "settings": { + "analysis": { + "analyzer": { + "limit_example": { + "type": "custom", + "tokenizer": "standard", + "filter": ["lowercase", "five_token_limit"] + } + }, + "filter": { + "five_token_limit": { + "type": "limit", + "max_token_count": 5 + } + } + } + } +} -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc index 674dfe541c9..519fd77ba2a 100644 --- a/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/lowercase-tokenfilter.asciidoc @@ -10,28 +10,30 @@ custom analyzer [source,js] -------------------------------------------------- -index : - analysis : - analyzer : - myAnalyzer2 : - type : custom - tokenizer : myTokenizer1 - filter : [myTokenFilter1, myGreekLowerCaseFilter] - char_filter : [my_html] - tokenizer : - myTokenizer1 : - type : standard - max_token_length : 900 - filter : - myTokenFilter1 : - type : stop - stopwords : [stop1, stop2, stop3, stop4] - myGreekLowerCaseFilter : - type : lowercase - language : greek - char_filter : - my_html : - type : html_strip - escaped_tags : [xxx, yyy] - read_ahead : 1024 +PUT /lowercase_example +{ + "settings": { + "analysis": { + "analyzer": { + "standard_lowercase_example": { + "type": "custom", + "tokenizer": "standard", + "filter": ["lowercase"] + }, + "greek_lowercase_example": { + "type": "custom", + "tokenizer": "standard", + "filter": ["greek_lowercase"] + } + }, + "filter": { + "greek_lowercase": { + "type": "lowercase", + "language": "greek" + } + } + } + } +} -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/cluster/tasks.asciidoc b/docs/reference/cluster/tasks.asciidoc index e087eebd9c7..f1f9a669312 100644 --- a/docs/reference/cluster/tasks.asciidoc +++ b/docs/reference/cluster/tasks.asciidoc @@ -165,9 +165,10 @@ If a long-running task supports cancellation, it can be cancelled by the followi [source,js] -------------------------------------------------- -POST _tasks/task_id:1/_cancel +POST _tasks/node_id:task_id/_cancel -------------------------------------------------- // CONSOLE +// TEST[s/task_id/1/] The task cancellation command supports the same task selection parameters as the list tasks command, so multiple tasks can be cancelled at the same time. 
For example, the following command will cancel all reindex tasks running on the diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 3cf371644f0..87d20b35221 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -216,8 +216,9 @@ And the response: epoch timestamp cluster status node.total node.data shards pri relo init unassign pending_tasks max_task_wait_time active_shards_percent 1475247709 17:01:49 elasticsearch green 1 1 0 0 0 0 0 0 - 100.0% -------------------------------------------------- -// TESTRESPONSE[s/0 0/0 [01]/] -// TESTRESPONSE[s/1475247709 17:01:49 elasticsearch/\\d+ \\d+:\\d+:\\d+ docs_integTestCluster/ _cat] +// TESTRESPONSE[s/1475247709 17:01:49 elasticsearch/\\d+ \\d+:\\d+:\\d+ docs_integTestCluster/] +// TESTRESPONSE[s/0 0 -/0 \\d+ -/] +// TESTRESPONSE[_cat] We can see that our cluster named "elasticsearch" is up with a green status. diff --git a/docs/reference/indices/put-mapping.asciidoc b/docs/reference/indices/put-mapping.asciidoc index d2e0bf5fd77..6bc623b95e2 100644 --- a/docs/reference/indices/put-mapping.asciidoc +++ b/docs/reference/indices/put-mapping.asciidoc @@ -76,7 +76,6 @@ exceptions to this rule. For instance: * new <> can be added to <> fields. * new <> can be added to existing fields. -* <> can be disabled, but not enabled. * the <> parameter can be updated. For example: diff --git a/docs/reference/mapping/params/fielddata.asciidoc b/docs/reference/mapping/params/fielddata.asciidoc index 6795b0f5b9b..baca3c426d6 100644 --- a/docs/reference/mapping/params/fielddata.asciidoc +++ b/docs/reference/mapping/params/fielddata.asciidoc @@ -135,8 +135,7 @@ whenever a new segment becomes visible. The loading time of global ordinals depends on the number of terms in a field, but in general it is low, since the source field data has already been loaded. The memory overhead of global ordinals is small because it is very -efficiently compressed. Eager loading of global ordinals can move the loading -time from the first search request, to the refresh itself. +efficiently compressed. ***************************************** diff --git a/docs/reference/mapping/params/similarity.asciidoc b/docs/reference/mapping/params/similarity.asciidoc index 66b1d8a42cf..0a5979c9d32 100644 --- a/docs/reference/mapping/params/similarity.asciidoc +++ b/docs/reference/mapping/params/similarity.asciidoc @@ -3,7 +3,7 @@ Elasticsearch allows you to configure a scoring algorithm or _similarity_ per field. The `similarity` setting provides a simple way of choosing a similarity -algorithm other than the default TF/IDF, such as `BM25`. +algorithm other than the default `BM25`, such as `TF/IDF`. Similarities are mostly useful for <> fields, but can also apply to other field types. @@ -25,6 +25,11 @@ configuration are: Lucene. See {defguide}/practical-scoring-function.html[Lucene’s Practical Scoring Function] for more information. +`boolean`:: + A simple boolean similarity, which is used when full-text ranking is not needed + and the score should only be based on whether the query terms match or not. + Boolean similarity gives terms a score equal to their query boost.
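+
+With the `boolean` similarity each matching term contributes its query boost to
+the score, so every matching document scores identically for a single-term
+query. As an illustrative sketch (the index and field names here are
+hypothetical and assume a field mapped with `"similarity": "boolean"`):
+
+[source,js]
+--------------------------------------------------
+GET /my_index/_search
+{
+  "query": {
+    "match": {
+      "boolean_sim_field": {
+        "query": "quick brown fox",
+        "boost": 2
+      }
+    }
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+Term frequency, inverse document frequency, and field length play no part in
+the score of such a query.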
+ The `similarity` can be set on the field level when a field is first created, as follows: @@ -42,6 +47,10 @@ PUT my_index "classic_field": { "type": "text", "similarity": "classic" <2> + }, + "boolean_sim_field": { + "type": "text", + "similarity": "boolean" <3> } } } @@ -51,3 +60,4 @@ PUT my_index // CONSOLE <1> The `default_field` uses the `BM25` similarity. <2> The `classic_field` uses the `classic` similarity (ie TF/IDF). +<3> The `boolean_sim_field` uses the `boolean` similarity. diff --git a/docs/reference/mapping/types/geo-point.asciidoc b/docs/reference/mapping/types/geo-point.asciidoc index a839117c7c5..ccfc93e34c5 100644 --- a/docs/reference/mapping/types/geo-point.asciidoc +++ b/docs/reference/mapping/types/geo-point.asciidoc @@ -111,20 +111,17 @@ When accessing the value of a geo-point in a script, the value is returned as a `GeoPoint` object, which allows access to the `.lat` and `.lon` values respectively: - -[source,js] +[source,painless] -------------------------------------------------- -geopoint = doc['location'].value; -lat = geopoint.lat; -lon = geopoint.lon; +def geopoint = doc['location'].value; +def lat = geopoint.lat; +def lon = geopoint.lon; -------------------------------------------------- For performance reasons, it is better to access the lat/lon values directly: -[source,js] +[source,painless] -------------------------------------------------- -lat = doc['location'].lat; -lon = doc['location'].lon; +def lat = doc['location'].lat; +def lon = doc['location'].lon; -------------------------------------------------- - - diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 4fe185fe463..18ffdbcbc63 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -156,16 +156,23 @@ cell right next to it -- even though the shape is very close to the point. [source,js] -------------------------------------------------- +PUT /example { - "properties": { - "location": { - "type": "geo_shape", - "tree": "quadtree", - "precision": "1m" + "mappings": { + "doc": { + "properties": { + "location": { + "type": "geo_shape", + "tree": "quadtree", + "precision": "1m" + } + } } } } -------------------------------------------------- +// CONSOLE +// TESTSETUP This mapping maps the location field to the geo_shape type using the quad_tree implementation and a precision of 1m. Elasticsearch translates @@ -240,6 +247,7 @@ API. [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "point", @@ -247,6 +255,7 @@ API. } } -------------------------------------------------- +// CONSOLE [float] ===== http://geojson.org/geojson-spec.html#id3[LineString] @@ -257,6 +266,7 @@ line. Specifying more than two points creates an arbitrary path. [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "linestring", @@ -264,6 +274,7 @@ line. Specifying more than two points creates an arbitrary path. } } -------------------------------------------------- +// CONSOLE The above `linestring` would draw a straight line starting at the White House to the US Capitol Building. @@ -277,6 +288,7 @@ closed). [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "polygon", @@ -286,12 +298,14 @@ closed). 
} } -------------------------------------------------- +// CONSOLE The first array represents the outer boundary of the polygon, the other arrays represent the interior shapes ("holes"): [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "polygon", @@ -302,6 +316,8 @@ arrays represent the interior shapes ("holes"): } } -------------------------------------------------- +// CONSOLE +// TEST[skip:https://github.com/elastic/elasticsearch/issues/23836] *IMPORTANT NOTE:* GeoJSON does not mandate a specific order for vertices thus ambiguous polygons around the dateline and poles are possible. To alleviate ambiguity @@ -322,6 +338,7 @@ OGC standards to eliminate ambiguity resulting in a polygon that crosses the dat [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "polygon", @@ -332,6 +349,8 @@ OGC standards to eliminate ambiguity resulting in a polygon that crosses the dat } } -------------------------------------------------- +// CONSOLE +// TEST[skip:https://github.com/elastic/elasticsearch/issues/23836] An `orientation` parameter can be defined when setting the geo_shape mapping (see <>). This will define vertex order for the coordinate list on the mapped geo_shape field. It can also be overridden on each document. The following is an example for @@ -339,6 +358,7 @@ overriding the orientation on a document: [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "polygon", @@ -350,6 +370,8 @@ overriding the orientation on a document: } } -------------------------------------------------- +// CONSOLE +// TEST[skip:https://github.com/elastic/elasticsearch/issues/23836] [float] ===== http://www.geojson.org/geojson-spec.html#id5[MultiPoint] @@ -358,6 +380,7 @@ A list of geojson points. [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "multipoint", @@ -367,6 +390,7 @@ A list of geojson points. } } -------------------------------------------------- +// CONSOLE [float] ===== http://www.geojson.org/geojson-spec.html#id6[MultiLineString] @@ -375,6 +399,7 @@ A list of geojson linestrings. [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "multilinestring", @@ -386,6 +411,8 @@ A list of geojson linestrings. } } -------------------------------------------------- +// CONSOLE +// TEST[skip:https://github.com/elastic/elasticsearch/issues/23836] [float] ===== http://www.geojson.org/geojson-spec.html#id7[MultiPolygon] @@ -394,18 +421,20 @@ A list of geojson polygons. [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "multipolygon", "coordinates" : [ [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ], - [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]] ] ] } } -------------------------------------------------- +// CONSOLE +// TEST[skip:https://github.com/elastic/elasticsearch/issues/23836] [float] ===== http://geojson.org/geojson-spec.html#geometrycollection[Geometry Collection] @@ -414,6 +443,7 @@ A collection of geojson geometry objects. [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type": "geometrycollection", @@ -430,7 +460,8 @@ A collection of geojson geometry objects. 
} } -------------------------------------------------- - +// CONSOLE +// TEST[skip:https://github.com/elastic/elasticsearch/issues/23836] [float] ===== Envelope @@ -441,6 +472,7 @@ bounding rectangle: [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "envelope", @@ -448,6 +480,8 @@ bounding rectangle: } } -------------------------------------------------- +// CONSOLE +// TEST[skip:https://github.com/elastic/elasticsearch/issues/23836] [float] ===== Circle @@ -457,6 +491,7 @@ point with a radius: [source,js] -------------------------------------------------- +POST /example/doc { "location" : { "type" : "circle", @@ -465,6 +500,7 @@ point with a radius: } } -------------------------------------------------- +// CONSOLE Note: The inner `radius` field is required. If not specified, then the units of the `radius` will default to `METERS`. diff --git a/docs/reference/migration/migrate_6_0/java.asciidoc b/docs/reference/migration/migrate_6_0/java.asciidoc index e5f251c1c43..991fe165fb2 100644 --- a/docs/reference/migration/migrate_6_0/java.asciidoc +++ b/docs/reference/migration/migrate_6_0/java.asciidoc @@ -7,3 +7,9 @@ Previously the `setSource` methods and other methods that accepted byte/string representations of an object source did not require the XContentType to be specified. The auto-detection of the content type is no longer used, so these methods now require the XContentType as an additional argument when providing the source in bytes or as a string. + +=== `DeleteByQueryRequest` requires an explicitly set query + +In previous versions of Elasticsearch, delete by query requests without an explicit query +were accepted, `match_all` was used as the default query, and all documents were deleted +as a result. From version 6.0.0, a `DeleteByQueryRequest` requires that an explicit query be set. diff --git a/docs/reference/migration/migrate_6_0/plugins.asciidoc b/docs/reference/migration/migrate_6_0/plugins.asciidoc index be650a71bd0..d2032d683b5 100644 --- a/docs/reference/migration/migrate_6_0/plugins.asciidoc +++ b/docs/reference/migration/migrate_6_0/plugins.asciidoc @@ -25,3 +25,11 @@ the region of the configured bucket. * The container an azure repository is configured with will no longer be created automatically. It must exist before the azure repository is created. + +* The global repository settings that you were previously able to set in the elasticsearch config file under the `repositories.azure` +namespace have been removed. This includes `repositories.azure.account`, `repositories.azure.container`, +`repositories.azure.base_path`, `repositories.azure.location_mode`, `repositories.azure.chunk_size` and +`repositories.azure.compress`. +You must now set these settings on each repository instead, as `account`, `container`, `base_path`, +`location_mode`, `chunk_size` and `compress` respectively. +See {plugins}/repository-azure-usage.html#repository-azure-repository-settings[Azure Repository settings]. diff --git a/docs/reference/migration/migrate_6_0/rest.asciidoc b/docs/reference/migration/migrate_6_0/rest.asciidoc index 934d2c2e647..5ef09a15bff 100644 --- a/docs/reference/migration/migrate_6_0/rest.asciidoc +++ b/docs/reference/migration/migrate_6_0/rest.asciidoc @@ -47,3 +47,9 @@ requests. Refresh requests that are broadcast to multiple shards that can have one or more shards fail during the request now return a 500 response instead of a 200 response in the event there is at least one failure.
+ +=== Delete by Query API requires an explicit query + +In previous versions of Elasticsearch, delete by query requests without an explicit query +were accepted, `match_all` was used as the default query, and all documents were deleted +as a result. From version 6.0.0, delete by query requests require an explicit query. diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 5ca8acf37cf..70242c1a5e2 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -209,10 +209,12 @@ To create a dedicated ingest node, set: node.master: false <1> node.data: false <2> node.ingest: true <3> +search.remote.connect: false <4> ------------------- <1> Disable the `node.master` role (enabled by default). <2> Disable the `node.data` role (enabled by default). <3> The `node.ingest` role is enabled by default. +<4> Disable cross-cluster search (enabled by default). [float] [[coordinating-only-node]] @@ -235,17 +237,19 @@ acknowledgement of cluster state updates from every node! The benefit of coordinating only nodes should not be overstated -- data nodes can happily serve the same purpose. -To create a coordinating only node, set: +To create a dedicated coordinating node, set: [source,yaml] ------------------- node.master: false <1> node.data: false <2> node.ingest: false <3> +search.remote.connect: false <4> ------------------- <1> Disable the `node.master` role (enabled by default). <2> Disable the `node.data` role (enabled by default). <3> Disable the `node.ingest` role (enabled by default). +<4> Disable cross-cluster search (enabled by default). [float] == Node data path settings diff --git a/docs/reference/modules/scripting/using.asciidoc b/docs/reference/modules/scripting/using.asciidoc index 7c89201c7fc..0a64758f5aa 100644 --- a/docs/reference/modules/scripting/using.asciidoc +++ b/docs/reference/modules/scripting/using.asciidoc @@ -12,6 +12,7 @@ the same pattern: "params": { ... } <3> } ------------------------------------- +// NOTCONSOLE <1> The language the script is written in, which defaults to `painless`. <2> The script itself which may be specified as `inline`, `stored`, or `file`. <3> Any named parameters that should be passed into the script. @@ -89,6 +90,7 @@ multipliers, don't hard-code the multiplier into the script: ---------------------- "inline": "doc['my_field'] * 2" ---------------------- +// NOTCONSOLE Instead, pass it in as a named parameter: @@ -99,6 +101,7 @@ Instead, pass it in as a named parameter: "multiplier": 2 } ---------------------- +// NOTCONSOLE The first version has to be recompiled every time the multiplier changes. The second version is only compiled once. @@ -134,7 +137,7 @@ the following example creates a Painless script called `calculate_score`: [source,sh] -------------------------------------------------- -cat "Math.log(_score * 2) + my_modifier" > config/scripts/calculate-score.painless +echo "Math.log(_score * 2) + params.my_modifier" > config/scripts/calculate_score.painless -------------------------------------------------- This script can be used as follows: @@ -147,7 +150,7 @@ GET my_index/_search "script": { "script": { "lang": "painless", <1> - "file": "calculate-score", <2> + "file": "calculate_score", <2> "params": { "my_modifier": 2 } @@ -156,6 +159,8 @@ GET my_index/_search } } -------------------------------------------------- +// CONSOLE +// TEST[continued] <1> The language of the script, which should correspond with the script file suffix.
<2> The name of the script, which should be the name of the file. @@ -206,16 +211,10 @@ delete and put requests. ==== Request Examples -The following are examples of stored script requests: +The following are examples of using a stored script that lives at +`/_scripts/{id}`. -[source,js] ------------------------------------ -/_scripts/{id} <1> ------------------------------------ -<1> The `id` is a unique identifier for the stored script. - -This example stores a Painless script called `calculate-score` in the cluster -state: +First, create the script called `calculate-score` in the cluster state: [source,js] ----------------------------------- diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc index 09f7008a6fb..94138dbdb0f 100644 --- a/docs/reference/modules/snapshots.asciidoc +++ b/docs/reference/modules/snapshots.asciidoc @@ -16,6 +16,19 @@ a 2.x cluster and use <> to rebuild the index in a 5.x cluster. This is as time consuming as restoring from archival copies of the original data. +Note: If a repository is connected to a 2.x cluster, and you want to connect +a 5.x cluster to the same repository, you will have to either first set the 2.x +repository to `readonly` mode (see below for details on `readonly` mode) or create +the 5.x repository in `readonly` mode. A 5.x cluster will update the repository +to conform to 5.x specific formats, which will mean that any new snapshots written +via the 2.x cluster will not be visible to the 5.x cluster, and vice versa. +In fact, as a general rule, only one cluster should connect to the same repository +location with write access; all other clusters connected to the same repository +should be set to `readonly` mode. While setting all but one repository to +`readonly` should work with multiple clusters differing by one major version, +it is not a supported configuration. + + [float] === Repositories @@ -287,6 +300,35 @@ GET /_snapshot/my_backup/snapshot_1 // CONSOLE // TEST[continued] +This command returns basic information about the snapshot including start and end time, version of +Elasticsearch that created the snapshot, the list of included indices, the current state of the +snapshot and the list of failures that occurred during the snapshot. The snapshot `state` can be: + +[horizontal] +`IN_PROGRESS`:: + + The snapshot is currently running. + +`SUCCESS`:: + + The snapshot finished and all shards were stored successfully. + +`FAILED`:: + + The snapshot finished with an error and failed to store any data. + +`PARTIAL`:: + + The global cluster state was stored, but data of at least one shard wasn't stored successfully. + The `failure` section in this case should contain more detailed information about shards + that were not processed correctly. + +`INCOMPATIBLE`:: + + The snapshot was created with an old version of Elasticsearch and therefore is incompatible with + the current version of the cluster. + + As with repositories, information about multiple snapshots can be queried in one go, supporting wildcards as well: [source,sh] diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index 8f3a524dbdd..01b204b510f 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -32,7 +32,7 @@ There are several thread pools, but the important ones include: `bulk`:: For bulk operations. Thread pool type is `fixed` with a size of `# of available processors`, - queue_size of `50`.
The maximum size for this pool + queue_size of `200`. The maximum size for this pool is `1 + # of available processors`. `snapshot`:: diff --git a/docs/reference/query-dsl/exists-query.asciidoc b/docs/reference/query-dsl/exists-query.asciidoc index 4971219366f..f0e76852da1 100644 --- a/docs/reference/query-dsl/exists-query.asciidoc +++ b/docs/reference/query-dsl/exists-query.asciidoc @@ -24,6 +24,7 @@ For instance, these documents would all match the above query: { "user": ["jane"] } { "user": ["jane", null ] } <3> -------------------------------------------------- +// NOTCONSOLE <1> An empty string is a non-`null` value. <2> Even though the `standard` analyzer would emit zero tokens, the original field is non-`null`. <3> At least one non-`null` value is required. @@ -37,6 +38,7 @@ These documents would *not* match the above query: { "user": [null] } <2> { "foo": "bar" } <3> -------------------------------------------------- +// NOTCONSOLE <1> This field has no values. <2> At least one non-`null` value is required. <3> The `user` field is missing completely. @@ -50,11 +52,21 @@ instance, if the `user` field were mapped as follows: [source,js] -------------------------------------------------- - "user": { - "type": "text", - "null_value": "_null_" +PUT /example +{ + "mappings": { + "doc": { + "properties": { + "user": { + "type": "keyword", + "null_value": "_null_" + } + } + } } +} -------------------------------------------------- +// CONSOLE then explicit `null` values would be indexed as the string `_null_`, and the following docs would match the `exists` filter: @@ -64,6 +76,7 @@ following docs would match the `exists` filter: { "user": null } { "user": [null] } -------------------------------------------------- +// NOTCONSOLE However, these docs--without explicit `null` values--would still have no values in the `user` field and thus would not match the `exists` filter: @@ -73,11 +86,12 @@ no values in the `user` field and thus would not match the `exists` filter: { "user": [] } { "foo": "bar" } -------------------------------------------------- +// NOTCONSOLE ==== `missing` query -'missing' query has been removed because it can be advantageously replaced by an `exists` query inside a must_not -clause as follows: +There isn't a `missing` query. Instead use the `exists` query inside a +`must_not` clause as follows: [source,js] -------------------------------------------------- @@ -97,4 +111,3 @@ GET /_search // CONSOLE This query returns documents that have no value in the user field. - diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index 12203061336..00fd3b5609b 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -6,7 +6,7 @@ Filter documents indexed using the `geo_shape` type. Requires the <>. The `geo_shape` query uses the same grid square representation as the -geo_shape mapping to find documents that have a shape that intersects +`geo_shape` mapping to find documents that have a shape that intersects with the query shape. It will also use the same PrefixTree configuration as defined for the field mapping. @@ -17,28 +17,44 @@ examples. ==== Inline Shape Definition -Similar to the `geo_shape` type, the `geo_shape` Filter uses +Similar to the `geo_shape` type, the `geo_shape` query uses http://www.geojson.org[GeoJSON] to represent shapes. 
-Given a document that looks like this: +Given the following index: [source,js] -------------------------------------------------- +PUT /example +{ + "mappings": { + "doc": { + "properties": { + "location": { + "type": "geo_shape" + } + } + } + } +} + +POST /example/doc?refresh { "name": "Wind & Wetter, Berlin, Germany", - "location": { - "type": "Point", - "coordinates": [13.400544, 52.530286] - } + "location": { + "type": "point", + "coordinates": [13.400544, 52.530286] + } } -------------------------------------------------- +// CONSOLE +// TESTSETUP The following query will find the point using Elasticsearch's `envelope` GeoJSON extension: [source,js] -------------------------------------------------- -GET /_search +GET /example/_search { "query":{ "bool": { @@ -83,25 +99,43 @@ shape: [source,js] -------------------------------------------------- -GET /_search +PUT /shapes +{ + "mappings": { + "doc": { + "properties": { + "location": { + "type": "geo_shape" + } + } + } + } +} + +PUT /shapes/doc/deu +{ + "location": { + "type": "envelope", + "coordinates" : [[13.0, 53.0], [14.0, 52.0]] + } +} + +GET /example/_search { "query": { "bool": { - "must": { - "match_all": {} - }, - "filter": { - "geo_shape": { - "location": { - "indexed_shape": { - "id": "DEU", - "type": "countries", - "index": "shapes", - "path": "location" - } + "filter": { + "geo_shape": { + "location": { + "indexed_shape": { + "index": "shapes", + "type": "doc", + "id": "deu", + "path": "location" } } } + } } } } diff --git a/docs/reference/search/field-caps.asciidoc b/docs/reference/search/field-caps.asciidoc new file mode 100644 index 00000000000..d327362f81c --- /dev/null +++ b/docs/reference/search/field-caps.asciidoc @@ -0,0 +1,126 @@ +[[search-field-caps]] +== Field Capabilities API + +experimental[] + +The field capabilities API allows you to retrieve the capabilities of fields among multiple indices. + +The field capabilities API by default executes on all indices: + +[source,js] +-------------------------------------------------- +GET _field_caps?fields=rating +-------------------------------------------------- +// CONSOLE + +... but the request can also be restricted to specific indices: + +[source,js] +-------------------------------------------------- +GET twitter/_field_caps?fields=rating +-------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] + +Alternatively, the `fields` option can also be defined in the request body: + +[source,js] +-------------------------------------------------- +POST _field_caps +{ + "fields" : ["rating"] +} +-------------------------------------------------- +// CONSOLE + +This is equivalent to the previous request. + +Supported request options: + +[horizontal] +`fields`:: A list of fields to retrieve the capabilities for. The field name supports wildcard notation. For example, using `text_*` + will cause all fields that match the expression to be returned. + +[float] +=== Field Capabilities + +The field capabilities API returns the following information per field: + +[horizontal] +`is_searchable`:: + +Whether this field is indexed for search on all indices. + +`is_aggregatable`:: + +Whether this field can be aggregated on all indices. + +`indices`:: + +The list of indices where this field has the same type, +or null if all indices have the same type for the field. + +`non_searchable_indices`:: + +The list of indices where this field is not searchable, +or null if all indices have the same definition for the field.
+ +`non_aggregatable_indices`:: + +The list of indices where this field is not aggregatable, +or null if all indices have the same definition for the field. + + +[float] +=== Response format + +Request: + +[source,js] +-------------------------------------------------- +GET _field_caps?fields=rating,title +-------------------------------------------------- +// CONSOLE + +[source,js] +-------------------------------------------------- +{ + "fields": { + "rating": { <1> + "long": { + "is_searchable": true, + "is_aggregatable": false, + "indices": ["index1", "index2"], + "non_aggregatable_indices": ["index1"] <2> + }, + "keyword": { + "is_searchable": false, + "is_aggregatable": true, + "indices": ["index3", "index4"], + "non_searchable_indices": ["index4"] <3> + } + }, + "title": { <4> + "text": { + "is_searchable": true, + "is_aggregatable": false + + } + } + } +} +-------------------------------------------------- +// NOTCONSOLE + +<1> The field `rating` is defined as a `long` in `index1` and `index2` +and as a `keyword` in `index3` and `index4`. +<2> The field `rating` is not aggregatable in `index1`. +<3> The field `rating` is not searchable in `index4`. +<4> The field `title` is defined as `text` in all indices. + + + + + + + diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc index ea9f4fea875..d0f60d700a8 100644 --- a/docs/reference/search/request/preference.asciidoc +++ b/docs/reference/search/request/preference.asciidoc @@ -1,9 +1,8 @@ [[search-request-preference]] === Preference -Controls a `preference` of which shard replicas to execute the search -request on. By default, the operation is randomized between the shard -replicas. +Controls a `preference` of the shard copies on which to execute the +search. By default, the operation is randomized among the available shard copies. The `preference` is a query string parameter which can be set to: diff --git a/docs/reference/setup/bootstrap-checks.asciidoc b/docs/reference/setup/bootstrap-checks.asciidoc index 22c3f03cd36..6f32d5054fb 100644 --- a/docs/reference/setup/bootstrap-checks.asciidoc +++ b/docs/reference/setup/bootstrap-checks.asciidoc @@ -31,16 +31,31 @@ Elasticsearch instances must be reachable via transport communication so they must bind transport to an external interface. Thus, we consider an Elasticsearch instance to be in development mode if it does not bind transport to an external interface (the default), and is otherwise in -production mode if it does bind transport to an external interface. Note -that HTTP can be configured independently of transport via +production mode if it does bind transport to an external interface. + +Note that HTTP can be configured independently of transport via <> and <>; this can be useful for configuring a single instance to be reachable via -HTTP for testing purposes without triggering production mode. If you do -want to force enforcement of the bootstrap checks independent of the -binding of the transport protocal, you can set the system property -`es.enforce.bootstrap.checks` to `true` (this can be useful on a -single-node production system that does not bind transport to an external -interface). +HTTP for testing purposes without triggering production mode. + +We recognize that some users need to bind transport to an external +interface for testing their usage of the transport client.
For this +situation, we provide the discovery type `single-node` (configure it by +setting `discovery.type` to `single-node`); in this situation, a node +will elect itself master and will not form a cluster with any other +node. + +If you are running a single node in production, it is possible to evade +the bootstrap checks (either by not binding transport to an external +interface, or by binding transport to an external interface and setting +the discovery type to `single-node`). For this situation, you can force +execution of the bootstrap checks by setting the system property +`es.enforce.bootstrap.checks` to `true` (set this in <>, or +by adding `-Des.enforce.bootstrap.checks=true` to the environment +variable `ES_JAVA_OPTS`). We strongly encourage you to do this if you +are in this specific situation. This system property can be used to +force execution of the bootstrap checks independent of the node +configuration. === Heap size check @@ -48,11 +63,11 @@ If a JVM is started with unequal initial and max heap size, it can be prone to pauses as the JVM heap is resized during system usage. To avoid these resize pauses, it's best to start the JVM with the initial heap size equal to the maximum heap size. Additionally, if -<> is enabled, the JVM will -lock the initial size of the heap on startup. If the initial heap size -is not equal to the maximum heap size, after a resize it will not be the -case that all of the JVM heap is locked in memory. To pass the heap size -check, you must configure the <>. +<> is enabled, the JVM +will lock the initial size of the heap on startup. If the initial heap +size is not equal to the maximum heap size, after a resize it will not +be the case that all of the JVM heap is locked in memory. To pass the +heap size check, you must configure the <>. === File descriptor check @@ -76,13 +91,13 @@ Elasticsearch would much rather use to service requests. There are several ways to configure a system to disallow swapping. One way is by requesting the JVM to lock the heap in memory through `mlockall` (Unix) or virtual lock (Windows). This is done via the Elasticsearch setting -<>. However, there are cases -where this setting can be passed to Elasticsearch but Elasticsearch is -not able to lock the heap (e.g., if the `elasticsearch` user does not -have `memlock unlimited`). The memory lock check verifies that *if* the -`bootstrap.memory_lock` setting is enabled, that the JVM was successfully -able to lock the heap. To pass the memory lock check, you might have to -configure <>. +<>. However, there are +cases where this setting can be passed to Elasticsearch but +Elasticsearch is not able to lock the heap (e.g., if the `elasticsearch` +user does not have `memlock unlimited`). The memory lock check verifies +that *if* the `bootstrap.memory_lock` setting is enabled, that the JVM +was successfully able to lock the heap. To pass the memory lock check, +you might have to configure <>. === Maximum number of threads check @@ -139,29 +154,30 @@ the server VM. === Use serial collector check -There are various garbage collectors for the OpenJDK-derived JVMs targeting -different workloads. The serial collector in particular is best suited for -single logical CPU machines or extremely small heaps, neither of which are -suitable for running Elasticsearch. Using the serial collector with -Elasticsearch can be devastating for performance. The serial collector check -ensures that Elasticsearch is not configured to run with the serial -collector. 
To pass the serial collector check, you must not start Elasticsearch -with the serial collector (whether it's from the defaults for the JVM that -you're using, or you've explicitly specified it with `-XX:+UseSerialGC`). Note -that the default JVM configuration that ship with Elasticsearch configures -Elasticsearch to use the CMS collector. +There are various garbage collectors for the OpenJDK-derived JVMs +targeting different workloads. The serial collector in particular is +best suited for single logical CPU machines or extremely small heaps, +neither of which are suitable for running Elasticsearch. Using the +serial collector with Elasticsearch can be devastating for performance. +The serial collector check ensures that Elasticsearch is not configured +to run with the serial collector. To pass the serial collector check, +you must not start Elasticsearch with the serial collector (whether it's +from the defaults for the JVM that you're using, or you've explicitly +specified it with `-XX:+UseSerialGC`). Note that the default JVM +configuration that ships with Elasticsearch configures Elasticsearch to +use the CMS collector. === System call filter check - -Elasticsearch installs system call filters of various flavors depending on the -operating system (e.g., seccomp on Linux). These system call filters are -installed to prevent the ability to execute system calls related to forking as -a defense mechanism against arbitrary code execution attacks on Elasticsearch -The system call filter check ensures that if system call filters are enabled, -then they were successfully installed. To pass the system call filter check you -must either fix any configuration errors on your system that prevented system -call filters from installing (check your logs), or *at your own risk* disable -system call filters by setting `bootstrap.system_call_filter` to `false`. +Elasticsearch installs system call filters of various flavors depending +on the operating system (e.g., seccomp on Linux). These system call +filters are installed to prevent the ability to execute system calls +related to forking as a defense mechanism against arbitrary code +execution attacks on Elasticsearch. The system call filter check ensures +that if system call filters are enabled, then they were successfully +installed. To pass the system call filter check you must either fix any +configuration errors on your system that prevented system call filters +from installing (check your logs), or *at your own risk* disable system +call filters by setting `bootstrap.system_call_filter` to `false`. === OnError and OnOutOfMemoryError checks @@ -179,10 +195,17 @@ use the JVM flag `ExitOnOutOfMemoryError`. While this does not have the full capabilities of `OnError` nor `OnOutOfMemoryError`, arbitrary forking will not be supported with seccomp enabled. +=== Early-access check + +The OpenJDK project provides early-access snapshots of upcoming releases. These +releases are not suitable for production. The early-access check detects these +early-access snapshots. To pass this check, you must start Elasticsearch on a +release build of the JVM. + === G1GC check -Early versions of the HotSpot JVM that shipped with JDK 8 are known to have -issues that can lead to index corruption when the G1GC collector is enabled. -The versions impacted are those earlier than the version of HotSpot that -shipped with JDK 8u40. The G1GC check detects these early versions of the -HotSpot JVM.
+Early versions of the HotSpot JVM that shipped with JDK 8 are known to +have issues that can lead to index corruption when the G1GC collector is +enabled. The versions impacted are those earlier than the version of +HotSpot that shipped with JDK 8u40. The G1GC check detects these early +versions of the HotSpot JVM. diff --git a/docs/src/test/cluster/config/analysis/example_word_list.txt b/docs/src/test/cluster/config/analysis/example_word_list.txt new file mode 100644 index 00000000000..f79aea42af2 --- /dev/null +++ b/docs/src/test/cluster/config/analysis/example_word_list.txt @@ -0,0 +1,4 @@ +test +list +of +words diff --git a/docs/src/test/cluster/config/analysis/hyphenation_patterns.xml b/docs/src/test/cluster/config/analysis/hyphenation_patterns.xml new file mode 100644 index 00000000000..6241b3fc6cc --- /dev/null +++ b/docs/src/test/cluster/config/analysis/hyphenation_patterns.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + +aA + + + + + + +.a2 + + diff --git a/docs/src/test/cluster/config/scripts/calculate_score.painless b/docs/src/test/cluster/config/scripts/calculate_score.painless new file mode 100644 index 00000000000..0fad3fc59f9 --- /dev/null +++ b/docs/src/test/cluster/config/scripts/calculate_score.painless @@ -0,0 +1 @@ +Math.log(_score * 2) + params.my_modifier diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java index e70bc3434ee..072aa6faf84 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java @@ -51,7 +51,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { value = Arrays.asList("value1", "value2", "value3"); } config.put("value", value); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); AppendProcessor appendProcessor = factory.create(null, processorTag, config); assertThat(appendProcessor.getTag(), equalTo(processorTag)); assertThat(appendProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); @@ -97,7 +97,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("value", "value1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java index 16df08721d3..fbbfb8a8440 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java @@ -130,13 +130,13 @@ public class AppendProcessorTests extends ESTestCase { List values = new ArrayList<>(); Processor appendProcessor; if (randomBoolean()) { - String value = randomAsciiOfLengthBetween(1, 10); + String value = randomAlphaOfLengthBetween(1, 10); 
values.add(value); appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), value); } else { int valuesSize = randomIntBetween(0, 10); for (int i = 0; i < valuesSize; i++) { - values.add(randomAsciiOfLengthBetween(1, 10)); + values.add(randomAlphaOfLengthBetween(1, 10)); } appendProcessor = createAppendProcessor(randomMetaData.getFieldName(), values); } @@ -158,7 +158,7 @@ public class AppendProcessorTests extends ESTestCase { private static Processor createAppendProcessor(String fieldName, Object fieldValue) { TemplateService templateService = TestTemplateService.instance(); - return new AppendProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, + return new AppendProcessor(randomAlphaOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService)); } @@ -186,7 +186,7 @@ public class AppendProcessorTests extends ESTestCase { }, STRING { @Override Object randomValue() { - return randomAsciiOfLengthBetween(1, 10); + return randomAlphaOfLengthBetween(1, 10); } }, MAP { @Override @@ -194,7 +194,7 @@ public class AppendProcessorTests extends ESTestCase { int numItems = randomIntBetween(1, 10); Map map = new HashMap<>(numItems); for (int i = 0; i < numItems; i++) { - map.put(randomAsciiOfLengthBetween(1, 10), randomFrom(Scalar.values()).randomValue()); + map.put(randomAlphaOfLengthBetween(1, 10), randomFrom(Scalar.values()).randomValue()); } return map; } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java index afdb7f7841d..9e4acd7b17f 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java @@ -38,7 +38,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { ConvertProcessor.Type type = randomFrom(ConvertProcessor.Type.values()); config.put("field", "field1"); config.put("type", type.toString()); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ConvertProcessor convertProcessor = factory.create(null, processorTag, config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); @@ -50,7 +50,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { public void testCreateUnsupportedType() throws Exception { ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); Map config = new HashMap<>(); - String type = "type-" + randomAsciiOfLengthBetween(1, 10); + String type = "type-" + randomAlphaOfLengthBetween(1, 10); config.put("field", "field1"); config.put("type", type); try { @@ -67,7 +67,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { public void testCreateNoFieldPresent() throws Exception { ConvertProcessor.Factory factory = new ConvertProcessor.Factory(); Map config = new HashMap<>(); - String type = "type-" + randomAsciiOfLengthBetween(1, 10); + String type = "type-" + randomAlphaOfLengthBetween(1, 10); config.put("type", type); try { factory.create(null, null, config); @@ -96,7 +96,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("target_field", "field2"); config.put("type", type.toString()); - String processorTag = randomAsciiOfLength(10); + String 
processorTag = randomAlphaOfLength(10); ConvertProcessor convertProcessor = factory.create(null, processorTag, config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); @@ -112,7 +112,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("type", type.toString()); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ConvertProcessor convertProcessor = factory.create(null, processorTag, config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java index f8a8a24286f..7cab5fff45c 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorTests.java @@ -43,7 +43,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); int randomInt = randomInt(); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomInt); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.INTEGER, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, Integer.class), equalTo(randomInt)); } @@ -59,7 +59,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomInt); } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.INTEGER, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -67,10 +67,10 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertIntError() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - String value = "string-" + randomAsciiOfLengthBetween(1, 10); + String value = "string-" + randomAlphaOfLengthBetween(1, 10); ingestDocument.setFieldValue(fieldName, value); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.INTEGER, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.INTEGER, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -86,7 +86,7 @@ public class ConvertProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, randomFloat); expectedResult.put(fieldName, randomFloat); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.FLOAT, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, 
fieldName, Type.FLOAT, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, Float.class), equalTo(randomFloat)); } @@ -102,7 +102,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomFloat); } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.FLOAT, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.FLOAT, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -110,10 +110,10 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertFloatError() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - String value = "string-" + randomAsciiOfLengthBetween(1, 10); + String value = "string-" + randomAlphaOfLengthBetween(1, 10); ingestDocument.setFieldValue(fieldName, value); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.FLOAT, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.FLOAT, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -131,7 +131,7 @@ public class ConvertProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, booleanString); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.BOOLEAN, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.BOOLEAN, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, Boolean.class), equalTo(randomBoolean)); } @@ -151,7 +151,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomBoolean); } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.BOOLEAN, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.BOOLEAN, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -161,14 +161,14 @@ public class ConvertProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); String fieldValue; if (randomBoolean()) { - fieldValue = "string-" + randomAsciiOfLengthBetween(1, 10); + fieldValue = "string-" + randomAlphaOfLengthBetween(1, 10); } else { //verify that only proper boolean values are supported and we are strict about it fieldValue = randomFrom("on", "off", "yes", "no", "0", "1"); } ingestDocument.setFieldValue(fieldName, fieldValue); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.BOOLEAN, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.BOOLEAN, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -202,7 +202,7 @@ public class ConvertProcessorTests extends ESTestCase { } String fieldName = 
RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.STRING, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.STRING, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedFieldValue)); } @@ -238,7 +238,7 @@ public class ConvertProcessorTests extends ESTestCase { expectedList.add(randomValueString); } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, Type.STRING, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, Type.STRING, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(expectedList)); } @@ -247,7 +247,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); Type type = randomFrom(Type.values()); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, type, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, type, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -259,7 +259,7 @@ public class ConvertProcessorTests extends ESTestCase { public void testConvertNullField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); Type type = randomFrom(Type.values()); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", "field", type, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), "field", "field", type, false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -273,7 +273,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); String fieldName = RandomDocumentPicks.randomFieldName(random()); Type type = randomFrom(Type.values()); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, fieldName, type, true); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, fieldName, type, true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -282,7 +282,7 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Type type = randomFrom(Type.values()); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", "field", type, true); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), "field", "field", type, true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -306,7 +306,7 @@ public class ConvertProcessorTests extends ESTestCase { throw new UnsupportedOperationException(); } IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), 
Collections.singletonMap("field", randomValue)); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", "field", Type.AUTO, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); assertThat(convertedValue, sameInstance(randomValue)); @@ -315,7 +315,7 @@ public class ConvertProcessorTests extends ESTestCase { public void testAutoConvertStringNotMatched() throws Exception { String value = "notAnIntFloatOrBool"; IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", value)); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", "field", Type.AUTO, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); assertThat(convertedValue, sameInstance(value)); @@ -326,7 +326,7 @@ public class ConvertProcessorTests extends ESTestCase { String booleanString = Boolean.toString(randomBoolean); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", booleanString)); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", "field", Type.AUTO, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); assertThat(convertedValue, equalTo(randomBoolean)); @@ -336,7 +336,7 @@ public class ConvertProcessorTests extends ESTestCase { int randomInt = randomInt(); String randomString = Integer.toString(randomInt); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", randomString)); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", "field", Type.AUTO, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); assertThat(convertedValue, equalTo(randomInt)); @@ -346,7 +346,7 @@ public class ConvertProcessorTests extends ESTestCase { float randomFloat = randomFloat(); String randomString = Float.toString(randomFloat); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", randomString)); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), "field", "field", Type.AUTO, false); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), "field", "field", Type.AUTO, false); processor.execute(ingestDocument); Object convertedValue = ingestDocument.getFieldValue("field", Object.class); assertThat(convertedValue, equalTo(randomFloat)); @@ -356,8 +356,8 @@ public class ConvertProcessorTests extends ESTestCase { IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); int randomInt = randomInt(); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, String.valueOf(randomInt)); - String targetField = fieldName + randomAsciiOfLength(5); - Processor processor = new ConvertProcessor(randomAsciiOfLength(10), fieldName, targetField, Type.INTEGER, 
false); + String targetField = fieldName + randomAlphaOfLength(5); + Processor processor = new ConvertProcessor(randomAlphaOfLength(10), fieldName, targetField, Type.INTEGER, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(String.valueOf(randomInt))); assertThat(ingestDocument.getFieldValue(targetField, Integer.class), equalTo(randomInt)); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateFormatTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateFormatTests.java index 0885371f867..886630dbbd9 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateFormatTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateFormatTests.java @@ -81,6 +81,6 @@ public class DateFormatTests extends ESTestCase { assertThat(DateFormat.fromString("iso8601"), equalTo(DateFormat.Joda)); assertThat(DateFormat.fromString("TAI64N"), equalTo(DateFormat.Tai64n)); assertThat(DateFormat.fromString("tai64n"), equalTo(DateFormat.Joda)); - assertThat(DateFormat.fromString("prefix-" + randomAsciiOfLengthBetween(1, 10)), equalTo(DateFormat.Joda)); + assertThat(DateFormat.fromString("prefix-" + randomAlphaOfLengthBetween(1, 10)), equalTo(DateFormat.Joda)); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java index 95ad68bb110..2dc16ad7bd7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java @@ -37,10 +37,10 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testBuildDefaults() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); DateProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo(sourceField)); @@ -53,7 +53,7 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testMatchFieldIsMandatory() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String targetField = randomAsciiOfLengthBetween(1, 10); + String targetField = randomAlphaOfLengthBetween(1, 10); config.put("target_field", targetField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); @@ -68,8 +68,8 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testMatchFormatsIsMandatory() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); - String targetField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); + String targetField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("target_field", targetField); @@ -84,7 +84,7 @@ public class 
DateProcessorFactoryTests extends ESTestCase { public void testParseLocale() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); Locale locale = randomLocale(random()); @@ -97,7 +97,7 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testParseInvalidLocale() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); config.put("locale", "invalid_locale"); @@ -112,7 +112,7 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testParseTimezone() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); @@ -125,7 +125,7 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testParseInvalidTimezone() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("timezone", "invalid_timezone"); @@ -140,7 +140,7 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testParseMatchFormats() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); @@ -151,7 +151,7 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testParseMatchFormatsFailure() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("formats", "dd/MM/yyyy"); @@ -166,8 +166,8 @@ public class DateProcessorFactoryTests extends ESTestCase { public void testParseTargetField() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); - String sourceField = randomAsciiOfLengthBetween(1, 10); - String targetField = randomAsciiOfLengthBetween(1, 10); + String sourceField = randomAlphaOfLengthBetween(1, 10); + String targetField = randomAlphaOfLengthBetween(1, 10); config.put("field", sourceField); config.put("target_field", targetField); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java index 8ac5a56abb0..d92f2e84be1 100644 --- 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java @@ -38,7 +38,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class DateProcessorTests extends ESTestCase { public void testJodaPattern() { - DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, "date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06 11:05:15"); @@ -52,7 +52,7 @@ public class DateProcessorTests extends ESTestCase { matchFormats.add("yyyy dd MM"); matchFormats.add("dd/MM/yyyy"); matchFormats.add("dd-MM-yyyy"); - DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, "date_as_string", matchFormats, "date_as_date"); Map document = new HashMap<>(); @@ -86,7 +86,7 @@ public class DateProcessorTests extends ESTestCase { public void testInvalidJodaPattern() { try { - new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), + new DateProcessor(randomAlphaOfLength(10), DateTimeZone.UTC, randomLocale(random()), "date_as_string", Collections.singletonList("invalid pattern"), "date_as_date"); fail("date processor initialization should have failed"); } catch(IllegalArgumentException e) { @@ -95,7 +95,7 @@ public class DateProcessorTests extends ESTestCase { } public void testJodaPatternLocale() { - DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ITALIAN, + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ITALIAN, "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 giugno"); @@ -105,7 +105,7 @@ public class DateProcessorTests extends ESTestCase { } public void testJodaPatternDefaultYear() { - DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), DateTimeZone.forID("Europe/Amsterdam"), Locale.ENGLISH, "date_as_string", Collections.singletonList("dd/MM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "12/06"); @@ -116,7 +116,7 @@ public class DateProcessorTests extends ESTestCase { } public void testTAI64N() { - DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.forOffsetHours(2), randomLocale(random()), + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), DateTimeZone.forOffsetHours(2), randomLocale(random()), "date_as_string", Collections.singletonList("TAI64N"), "date_as_date"); Map document = new HashMap<>(); String dateAsString = (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024"; @@ -127,7 +127,7 @@ public class DateProcessorTests extends ESTestCase { } public void testUnixMs() { - DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), DateTimeZone.UTC, randomLocale(random()), "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); @@ -137,7 +137,7 @@ public class DateProcessorTests extends ESTestCase { } public void testUnix() { - DateProcessor dateProcessor = new DateProcessor(randomAsciiOfLength(10), DateTimeZone.UTC, randomLocale(random()), + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), DateTimeZone.UTC, randomLocale(random()), "date_as_string", Collections.singletonList("UNIX"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000.5"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java index 217a15cf5b3..ab0acad4aea 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java @@ -43,7 +43,7 @@ public class FailProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { Map config = new HashMap<>(); config.put("message", "error"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); FailProcessor failProcessor = factory.create(null, processorTag, config); assertThat(failProcessor.getTag(), equalTo(processorTag)); assertThat(failProcessor.getMessage().execute(Collections.emptyMap()), equalTo("error")); @@ -63,7 +63,7 @@ public class FailProcessorFactoryTests extends ESTestCase { FailProcessor.Factory factory = new FailProcessor.Factory(TestTemplateService.instance(true)); Map config = new HashMap<>(); config.put("message", "error"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorTests.java index 76685854d21..c05323c12e3 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorTests.java @@ -31,8 +31,8 @@ public class FailProcessorTests extends ESTestCase { public void test() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - String message = randomAsciiOfLength(10); - Processor processor = new FailProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(message)); + String message = randomAlphaOfLength(10); + Processor processor = new FailProcessor(randomAlphaOfLength(10), new TestTemplateService.MockTemplate(message)); 
try { processor.execute(ingestDocument); fail("fail processor should throw an exception"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java index 37aa8adca9a..4cac94cd5b5 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java @@ -38,7 +38,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("patterns", Collections.singletonList("(?\\w+)")); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); GrokProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getMatchField(), equalTo("_field")); @@ -53,7 +53,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("patterns", Collections.singletonList("(?\\w+)")); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); GrokProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getMatchField(), equalTo("_field")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java index 24d775db682..86bc780b75e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java @@ -38,7 +38,7 @@ public class GrokProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "1"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, false); processor.execute(doc); assertThat(doc.getFieldValue("one", String.class), equalTo("1")); @@ -48,7 +48,7 @@ public class GrokProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, "23"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, false); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("Provided Grok expressions do not match field value: [23]")); @@ -59,7 +59,7 @@ public class GrokProcessorTests extends ESTestCase { IngestDocument originalDoc = new IngestDocument(new HashMap<>(), new HashMap<>()); originalDoc.setFieldValue(fieldName, fieldName); IngestDocument doc = new 
IngestDocument(originalDoc); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.emptyMap(), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.emptyMap(), Collections.singletonList(fieldName), fieldName, false, false); processor.execute(doc); assertThat(doc, equalTo(originalDoc)); @@ -69,7 +69,7 @@ public class GrokProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, null); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, false); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [" + fieldName + "] is null, cannot process it.")); @@ -80,7 +80,7 @@ public class GrokProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); originalIngestDocument.setFieldValue(fieldName, null); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); @@ -90,7 +90,7 @@ public class GrokProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, 1); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, false); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); @@ -100,7 +100,7 @@ public class GrokProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); doc.setFieldValue(fieldName, 1); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, true); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [" + fieldName + "] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); @@ -109,7 +109,7 @@ public class GrokProcessorTests extends ESTestCase { public void testMissingField() { String fieldName = "foo.bar"; IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), 
Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, false); Exception e = expectThrows(Exception.class, () -> processor.execute(doc)); assertThat(e.getMessage(), equalTo("field [foo] not present as part of path [foo.bar]")); @@ -119,7 +119,7 @@ public class GrokProcessorTests extends ESTestCase { String fieldName = "foo.bar"; IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), Collections.singletonMap("ONE", "1"), + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); @@ -133,7 +133,7 @@ public class GrokProcessorTests extends ESTestCase { patternBank.put("ONE", "1"); patternBank.put("TWO", "2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), patternBank, + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), patternBank, Arrays.asList("%{ONE:one}", "%{TWO:two}", "%{THREE:three}"), fieldName, false, false); processor.execute(doc); assertThat(doc.hasField("one"), equalTo(false)); @@ -149,7 +149,7 @@ public class GrokProcessorTests extends ESTestCase { patternBank.put("ONE", "1"); patternBank.put("TWO", "2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), patternBank, + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), patternBank, Arrays.asList("%{ONE:one}", "%{TWO:two}", "%{THREE:three}"), fieldName, true, false); processor.execute(doc); assertThat(doc.hasField("one"), equalTo(false)); @@ -164,7 +164,7 @@ public class GrokProcessorTests extends ESTestCase { doc.setFieldValue(fieldName, "first1"); Map patternBank = new HashMap<>(); patternBank.put("ONE", "1"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), patternBank, + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), patternBank, Arrays.asList("%{ONE:one}"), fieldName, true, false); processor.execute(doc); assertThat(doc.hasField("one"), equalTo(true)); @@ -195,7 +195,7 @@ public class GrokProcessorTests extends ESTestCase { patternBank.put("ONE", "1"); patternBank.put("TWO", "2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), patternBank, + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), patternBank, Arrays.asList("%{ONE:first}-%{TWO:second}", "%{ONE:first}-%{THREE:second}"), fieldName, randomBoolean(), randomBoolean()); processor.execute(doc); assertThat(doc.getFieldValue("first", String.class), equalTo("1")); @@ -208,7 +208,7 @@ public class GrokProcessorTests extends ESTestCase { doc.setFieldValue(fieldName, "12"); Map patternBank = new HashMap<>(); patternBank.put("ONETWO", "1|2"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), patternBank, + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), patternBank, Collections.singletonList("%{ONETWO:first}%{ONETWO:first}"), fieldName, randomBoolean(), randomBoolean()); processor.execute(doc); 
assertThat(doc.getFieldValue("first", String.class), equalTo("1")); @@ -221,7 +221,7 @@ public class GrokProcessorTests extends ESTestCase { Map patternBank = new HashMap<>(); patternBank.put("ONETWO", "1|2"); patternBank.put("THREE", "3"); - GrokProcessor processor = new GrokProcessor(randomAsciiOfLength(10), patternBank, + GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), patternBank, Collections.singletonList("%{ONETWO:first}|%{THREE:second}"), fieldName, randomBoolean(), randomBoolean()); processor.execute(doc); assertFalse(doc.hasField("first")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java index 51e246c67e9..f7947c21621 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java @@ -36,7 +36,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("pattern", "\\."); config.put("replacement", "-"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); GsubProcessor gsubProcessor = factory.create(null, processorTag, config); assertThat(gsubProcessor.getTag(), equalTo(processorTag)); assertThat(gsubProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java index 20dc5b8c8de..d791d34722a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorTests.java @@ -36,7 +36,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsub() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); - Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-"); + Processor processor = new GsubProcessor(randomAlphaOfLength(10), fieldName, Pattern.compile("\\."), "-"); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo("127-0-0-1")); } @@ -45,7 +45,7 @@ public class GsubProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, 123); - Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-"); + Processor processor = new GsubProcessor(randomAlphaOfLength(10), fieldName, Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); @@ -58,7 +58,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsubFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new GsubProcessor(randomAsciiOfLength(10), fieldName, Pattern.compile("\\."), "-"); + 
Processor processor = new GsubProcessor(randomAlphaOfLength(10), fieldName, Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); @@ -69,7 +69,7 @@ public class GsubProcessorTests extends ESTestCase { public void testGsubNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Processor processor = new GsubProcessor(randomAsciiOfLength(10), "field", Pattern.compile("\\."), "-"); + Processor processor = new GsubProcessor(randomAlphaOfLength(10), "field", Pattern.compile("\\."), "-"); try { processor.execute(ingestDocument); fail("processor execution should have failed"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java index 68b2daecb4a..0eda639afce 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java @@ -34,7 +34,7 @@ public class JoinProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("separator", "-"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); JoinProcessor joinProcessor = factory.create(null, processorTag, config); assertThat(joinProcessor.getTag(), equalTo(processorTag)); assertThat(joinProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorTests.java index 078d6365709..b7ef9d675c2 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorTests.java @@ -43,7 +43,7 @@ public class JoinProcessorTests extends ESTestCase { List fieldValue = new ArrayList<>(numItems); String expectedResult = ""; for (int j = 0; j < numItems; j++) { - String value = randomAsciiOfLengthBetween(1, 10); + String value = randomAlphaOfLengthBetween(1, 10); fieldValue.add(value); expectedResult += value; if (j < numItems - 1) { @@ -51,7 +51,7 @@ public class JoinProcessorTests extends ESTestCase { } } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, separator); + Processor processor = new JoinProcessor(randomAlphaOfLength(10), fieldName, separator); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult)); } @@ -71,7 +71,7 @@ public class JoinProcessorTests extends ESTestCase { } } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, separator); + Processor processor = new JoinProcessor(randomAlphaOfLength(10), fieldName, separator); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult)); } @@ -79,8 +79,8 @@ public class JoinProcessorTests extends ESTestCase { public void testJoinNonListField() throws Exception { IngestDocument 
ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - ingestDocument.setFieldValue(fieldName, randomAsciiOfLengthBetween(1, 10)); - Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, "-"); + ingestDocument.setFieldValue(fieldName, randomAlphaOfLengthBetween(1, 10)); + Processor processor = new JoinProcessor(randomAlphaOfLength(10), fieldName, "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -91,7 +91,7 @@ public class JoinProcessorTests extends ESTestCase { public void testJoinNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new JoinProcessor(randomAsciiOfLength(10), fieldName, "-"); + Processor processor = new JoinProcessor(randomAlphaOfLength(10), fieldName, "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -101,7 +101,7 @@ public class JoinProcessorTests extends ESTestCase { public void testJoinNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Processor processor = new JoinProcessor(randomAsciiOfLength(10), "field", "-"); + Processor processor = new JoinProcessor(randomAlphaOfLength(10), "field", "-"); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java index 456b31f8720..66f163a31eb 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorFactoryTests.java @@ -21,11 +21,8 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; -import org.junit.Before; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -36,9 +33,9 @@ public class JsonProcessorFactoryTests extends ESTestCase { private static final JsonProcessor.Factory FACTORY = new JsonProcessor.Factory(); public void testCreate() throws Exception { - String processorTag = randomAsciiOfLength(10); - String randomField = randomAsciiOfLength(10); - String randomTargetField = randomAsciiOfLength(5); + String processorTag = randomAlphaOfLength(10); + String randomField = randomAlphaOfLength(10); + String randomTargetField = randomAlphaOfLength(5); Map config = new HashMap<>(); config.put("field", randomField); config.put("target_field", randomTargetField); @@ -49,8 +46,8 @@ public class JsonProcessorFactoryTests extends ESTestCase { } public void testCreateWithAddToRoot() throws Exception { - String processorTag = randomAsciiOfLength(10); - String randomField = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); + String randomField = randomAlphaOfLength(10); Map config = new HashMap<>(); config.put("field", randomField); config.put("add_to_root", true); @@ -62,8 +59,8 @@ public class JsonProcessorFactoryTests extends ESTestCase { } public void 
testCreateWithDefaultTarget() throws Exception { - String processorTag = randomAsciiOfLength(10); - String randomField = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); + String randomField = randomAlphaOfLength(10); Map config = new HashMap<>(); config.put("field", randomField); JsonProcessor jsonProcessor = FACTORY.create(null, processorTag, config); @@ -74,21 +71,21 @@ public class JsonProcessorFactoryTests extends ESTestCase { public void testCreateWithMissingField() throws Exception { Map config = new HashMap<>(); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, () -> FACTORY.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("[field] required property is missing")); } public void testCreateWithBothTargetFieldAndAddToRoot() throws Exception { - String randomField = randomAsciiOfLength(10); - String randomTargetField = randomAsciiOfLength(5); + String randomField = randomAlphaOfLength(10); + String randomTargetField = randomAlphaOfLength(5); Map config = new HashMap<>(); config.put("field", randomField); config.put("target_field", randomTargetField); config.put("add_to_root", true); ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, - () -> FACTORY.create(null, randomAsciiOfLength(10), config)); + () -> FACTORY.create(null, randomAlphaOfLength(10), config)); assertThat(exception.getMessage(), equalTo("[target_field] Cannot set a target field while also setting `add_to_root` to true")); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 5d087ebbc12..66ffb87e8da 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -36,9 +36,9 @@ public class JsonProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") public void testExecute() throws Exception { - String processorTag = randomAsciiOfLength(3); - String randomField = randomAsciiOfLength(3); - String randomTargetField = randomAsciiOfLength(2); + String processorTag = randomAlphaOfLength(3); + String randomField = randomAlphaOfLength(3); + String randomTargetField = randomAlphaOfLength(2); JsonProcessor jsonProcessor = new JsonProcessor(processorTag, randomField, randomTargetField, false); Map document = new HashMap<>(); @@ -76,8 +76,8 @@ public class JsonProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") public void testAddToRoot() throws Exception { - String processorTag = randomAsciiOfLength(3); - String randomTargetField = randomAsciiOfLength(2); + String processorTag = randomAlphaOfLength(3); + String randomTargetField = randomAlphaOfLength(2); JsonProcessor jsonProcessor = new JsonProcessor(processorTag, "a", randomTargetField, true); Map document = new HashMap<>(); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorFactoryTests.java index 4dc4e082655..8c1e8a71492 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorFactoryTests.java +++ 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorFactoryTests.java @@ -39,7 +39,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("field_split", "&"); config.put("value_split", "="); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); KeyValueProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("field1")); @@ -59,7 +59,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase { config.put("target_field", "target"); config.put("include_keys", Arrays.asList("a", "b")); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); KeyValueProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("field1")); @@ -73,7 +73,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase { public void testCreateWithMissingField() { KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); Map config = new HashMap<>(); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("[field] required property is missing")); @@ -83,7 +83,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase { KeyValueProcessor.Factory factory = new KeyValueProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("[field_split] required property is missing")); @@ -94,7 +94,7 @@ public class KeyValueProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("field_split", "&"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("[value_split] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java index f5db4be1435..1c7103d35b4 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java @@ -36,7 +36,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void test() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "&", "=", null, "target", false); + Processor processor = new 
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java
index f5db4be1435..1c7103d35b4 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java
@@ -36,7 +36,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void test() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
         String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
-        Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "&", "=", null, "target", false);
+        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, "target", false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
         assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
@@ -45,7 +45,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void testRootTarget() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
         ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe");
-        Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "myField", "&", "=", null, null, false);
+        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "myField", "&", "=", null, null, false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello"));
         assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe")));
@@ -54,7 +54,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void testKeySameAsSourceField() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
         ingestDocument.setFieldValue("first", "first=hello");
-        Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "first", "&", "=", null, null, false);
+        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "first", "&", "=", null, null, false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello")));
     }
@@ -62,7 +62,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void testIncludeKeys() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
         String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
-        Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "&", "=",
+        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
             Collections.singletonList("first"), "target", false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
@@ -71,7 +71,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void testMissingField() {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
-        Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "unknown", "&", "=", null, "target", false);
+        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "&", "=", null, "target", false);
         IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
         assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]"));
     }
@@ -81,7 +81,7 @@ public class KeyValueProcessorTests extends ESTestCase {
         IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
             Collections.singletonMap(fieldName, null));
         IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
-        Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "", "", null, "target", true);
+        Processor processor =
new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "", "", null, "target", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -89,7 +89,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testNonExistentWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "unknown", "", "", null, "target", true); + Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "", "", null, "target", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -97,7 +97,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testFailFieldSplitMatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe"); - Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), fieldName, "&", "=", null, "target", false); + Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe")); assertFalse(ingestDocument.hasField("target.second")); @@ -105,7 +105,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testFailValueSplitMatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar")); - Processor processor = new KeyValueProcessor(randomAsciiOfLength(10), "foo", "&", "=", null, "target", false); + Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "foo", "&", "=", null, "target", false); Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java index 2c80071b803..dc12556df4d 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java @@ -34,7 +34,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase { LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); LowercaseProcessor uppercaseProcessor = (LowercaseProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); @@ -46,7 +46,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("ignore_missing", true); - String processorTag = 
randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); LowercaseProcessor uppercaseProcessor = (LowercaseProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java index 560a949f156..77e26d3eb96 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorTests.java @@ -24,7 +24,7 @@ import java.util.Locale; public class LowercaseProcessorTests extends AbstractStringProcessorTestCase { @Override protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing) { - return new LowercaseProcessor(randomAsciiOfLength(10), field, ignoreMissing); + return new LowercaseProcessor(randomAlphaOfLength(10), field, ignoreMissing); } @Override diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java index 71e878744d5..d53e5ff1cd0 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java @@ -43,7 +43,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { Map config = new HashMap<>(); config.put("field", "field1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); RemoveProcessor removeProcessor = factory.create(null, processorTag, config); assertThat(removeProcessor.getTag(), equalTo(processorTag)); assertThat(removeProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); @@ -63,7 +63,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase { RemoveProcessor.Factory factory = new RemoveProcessor.Factory(TestTemplateService.instance(true)); Map config = new HashMap<>(); config.put("field", "field1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java index fd569567d1a..60748a52031 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java @@ -35,7 +35,7 @@ public class RemoveProcessorTests extends ESTestCase { public void testRemoveFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new 
RemoveProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(field)); + Processor processor = new RemoveProcessor(randomAlphaOfLength(10), new TestTemplateService.MockTemplate(field)); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(field), equalTo(false)); } @@ -43,7 +43,7 @@ public class RemoveProcessorTests extends ESTestCase { public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RemoveProcessor(randomAsciiOfLength(10), new TestTemplateService.MockTemplate(fieldName)); + Processor processor = new RemoveProcessor(randomAlphaOfLength(10), new TestTemplateService.MockTemplate(fieldName)); try { processor.execute(ingestDocument); fail("remove field should have failed"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java index b631b78bf03..1eaac36a64c 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java @@ -34,7 +34,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "old_field"); config.put("target_field", "new_field"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); RenameProcessor renameProcessor = factory.create(null, processorTag, config); assertThat(renameProcessor.getTag(), equalTo(processorTag)); assertThat(renameProcessor.getField(), equalTo("old_field")); @@ -48,7 +48,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { config.put("field", "old_field"); config.put("target_field", "new_field"); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); RenameProcessor renameProcessor = factory.create(null, processorTag, config); assertThat(renameProcessor.getTag(), equalTo(processorTag)); assertThat(renameProcessor.getField(), equalTo("old_field")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index 9fae812822b..758e5eb9972 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -45,7 +45,7 @@ public class RenameProcessorTests extends ESTestCase { do { newFieldName = RandomDocumentPicks.randomFieldName(random()); } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFieldName.equals(fieldName)); - Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, newFieldName, false); + Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, newFieldName, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), equalTo(fieldValue)); } @@ -63,7 +63,7 @@ public class RenameProcessorTests extends ESTestCase { document.put("one", one); IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list.0", "item", false); + Processor processor = new RenameProcessor(randomAlphaOfLength(10), "list.0", "item", false); processor.execute(ingestDocument); Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); assertThat(actualObject, instanceOf(List.class)); @@ -76,7 +76,7 @@ public class RenameProcessorTests extends ESTestCase { assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); - processor = new RenameProcessor(randomAsciiOfLength(10), "list.0", "list.3", false); + processor = new RenameProcessor(randomAlphaOfLength(10), "list.0", "list.3", false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -91,7 +91,7 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, + Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, RandomDocumentPicks.randomFieldName(random()), false); try { processor.execute(ingestDocument); @@ -105,7 +105,7 @@ public class RenameProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, + Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, RandomDocumentPicks.randomFieldName(random()), true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); @@ -114,7 +114,7 @@ public class RenameProcessorTests extends ESTestCase { public void testRenameNewFieldAlreadyExists() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new RenameProcessor(randomAsciiOfLength(10), RandomDocumentPicks.randomExistingFieldName( + Processor processor = new RenameProcessor(randomAlphaOfLength(10), RandomDocumentPicks.randomExistingFieldName( random(), ingestDocument), fieldName, false); try { processor.execute(ingestDocument); @@ -129,7 +129,7 @@ public class RenameProcessorTests extends ESTestCase { String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, null); String newFieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(randomAsciiOfLength(10), fieldName, newFieldName, false); + Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, newFieldName, false); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(fieldName), equalTo(false)); assertThat(ingestDocument.hasField(newFieldName), equalTo(true)); @@ -149,7 +149,7 @@ public class RenameProcessorTests extends ESTestCase { source.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new 
RenameProcessor(randomAsciiOfLength(10), "list", "new_field", false); + Processor processor = new RenameProcessor(randomAlphaOfLength(10), "list", "new_field", false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -173,7 +173,7 @@ public class RenameProcessorTests extends ESTestCase { source.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new RenameProcessor(randomAsciiOfLength(10), "list", "new_field", false); + Processor processor = new RenameProcessor(randomAlphaOfLength(10), "list", "new_field", false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -188,12 +188,12 @@ public class RenameProcessorTests extends ESTestCase { Map source = new HashMap<>(); source.put("foo", "bar"); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor1 = new RenameProcessor(randomAsciiOfLength(10), "foo", "foo.bar", false); + Processor processor1 = new RenameProcessor(randomAlphaOfLength(10), "foo", "foo.bar", false); processor1.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Collections.singletonMap("bar", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar", String.class), equalTo("bar")); - Processor processor2 = new RenameProcessor(randomAsciiOfLength(10), "foo.bar", "foo.bar.baz", false); + Processor processor2 = new RenameProcessor(randomAlphaOfLength(10), "foo.bar", "foo.bar.baz", false); processor2.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Collections.singletonMap("bar", Collections.singletonMap("baz", "bar")))); @@ -201,7 +201,7 @@ public class RenameProcessorTests extends ESTestCase { assertThat(ingestDocument.getFieldValue("foo.bar.baz", String.class), equalTo("bar")); // for fun lets try to restore it (which don't allow today) - Processor processor3 = new RenameProcessor(randomAsciiOfLength(10), "foo.bar.baz", "foo", false); + Processor processor3 = new RenameProcessor(randomAlphaOfLength(10), "foo.bar.baz", "foo", false); Exception e = expectThrows(IllegalArgumentException.class, () -> processor3.execute(ingestDocument)); assertThat(e.getMessage(), equalTo("field [foo] already exists")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java index 60b42979466..b3ee7a23a5d 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java @@ -57,7 +57,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase { Map configMap = new HashMap<>(); String randomType = randomFrom("id", "inline", "file"); configMap.put(randomType, "foo"); - ScriptProcessor processor = factory.create(null, randomAsciiOfLength(10), configMap); + ScriptProcessor processor = factory.create(null, randomAlphaOfLength(10), configMap); assertThat(processor.getScript().getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); assertThat(processor.getScript().getType().toString(), equalTo(ingestScriptParamToType.get(randomType))); assertThat(processor.getScript().getParams(), equalTo(Collections.emptyMap())); @@ -66,10 +66,10 @@ public class ScriptProcessorFactoryTests extends 
ESTestCase { public void testFactoryValidationWithParams() throws Exception { Map configMap = new HashMap<>(); String randomType = randomFrom("id", "inline", "file"); - Map randomParams = Collections.singletonMap(randomAsciiOfLength(10), randomAsciiOfLength(10)); + Map randomParams = Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10)); configMap.put(randomType, "foo"); configMap.put("params", randomParams); - ScriptProcessor processor = factory.create(null, randomAsciiOfLength(10), configMap); + ScriptProcessor processor = factory.create(null, randomAlphaOfLength(10), configMap); assertThat(processor.getScript().getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG)); assertThat(processor.getScript().getType().toString(), equalTo(ingestScriptParamToType.get(randomType))); assertThat(processor.getScript().getParams(), equalTo(randomParams)); @@ -88,7 +88,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase { configMap.put("lang", "mockscript"); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> factory.create(null, randomAsciiOfLength(10), configMap)); + () -> factory.create(null, randomAlphaOfLength(10), configMap)); assertThat(exception.getMessage(), is("Only one of [file], [id], or [inline] may be configured")); } @@ -97,7 +97,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase { configMap.put("lang", "mockscript"); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> factory.create(null, randomAsciiOfLength(10), configMap)); + () -> factory.create(null, randomAlphaOfLength(10), configMap)); assertThat(exception.getMessage(), is("Need [file], [id], or [inline] parameter to refer to scripts")); } @@ -115,7 +115,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase { configMap.put(randomType, "my_script"); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> factory.create(null, randomAsciiOfLength(10), configMap)); + () -> factory.create(null, randomAlphaOfLength(10), configMap)); assertThat(exception.getMessage(), is("compile-time exception")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index 8b16fdb7e61..94430622d1a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -58,7 +58,7 @@ public class ScriptProcessorTests extends ESTestCase { return null; }).when(executableScript).run(); - ScriptProcessor processor = new ScriptProcessor(randomAsciiOfLength(10), script, scriptService); + ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), script, scriptService); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java index 45f144e3305..57d0b7f7b93 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java @@ -44,7 +44,7 @@ public class SetProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("value", "value1"); 
- String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); SetProcessor setProcessor = factory.create(null, processorTag, config); assertThat(setProcessor.getTag(), equalTo(processorTag)); assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); @@ -58,7 +58,7 @@ public class SetProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("value", "value1"); config.put("override", overrideEnabled); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); SetProcessor setProcessor = factory.create(null, processorTag, config); assertThat(setProcessor.getTag(), equalTo(processorTag)); assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); @@ -105,7 +105,7 @@ public class SetProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("value", "value1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java index 9ef8fa44ac9..d5a693b2bc1 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java @@ -110,7 +110,7 @@ public class SetProcessorTests extends ESTestCase { private static Processor createSetProcessor(String fieldName, Object fieldValue, boolean overrideEnabled) { TemplateService templateService = TestTemplateService.instance(); - return new SetProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), + return new SetProcessor(randomAlphaOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService), overrideEnabled); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java index 97352109e2c..6b878b00a5a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SortProcessorTests.java @@ -42,7 +42,7 @@ public class SortProcessorTests extends ESTestCase { List fieldValue = new ArrayList<>(numItems); List expectedResult = new ArrayList<>(numItems); for (int j = 0; j < numItems; j++) { - String value = randomAsciiOfLengthBetween(1, 10); + String value = randomAlphaOfLengthBetween(1, 10); fieldValue.add(value); expectedResult.add(value); } @@ -54,7 +54,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), 
expectedResult); } @@ -68,7 +68,7 @@ public class SortProcessorTests extends ESTestCase { Collections.shuffle(fieldValue, random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, SortOrder.ASCENDING); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, SortOrder.ASCENDING); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class).toArray(), equalTo(expectedResult)); } @@ -91,7 +91,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), expectedResult); } @@ -114,7 +114,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), expectedResult); } @@ -137,7 +137,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), expectedResult); } @@ -160,7 +160,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), expectedResult); } @@ -183,7 +183,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), expectedResult); } @@ -206,7 +206,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), expectedResult); } @@ -221,7 +221,7 @@ public class SortProcessorTests extends ESTestCase { if (randomBoolean()) { value = String.valueOf(randomIntBetween(0, 100)); } else { - value = randomAsciiOfLengthBetween(1, 10); + value = 
randomAlphaOfLengthBetween(1, 10); } fieldValue.add(value); expectedResult.add(value); @@ -234,7 +234,7 @@ public class SortProcessorTests extends ESTestCase { } String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue); - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); processor.execute(ingestDocument); assertEquals(ingestDocument.getFieldValue(fieldName, List.class), expectedResult); } @@ -242,9 +242,9 @@ public class SortProcessorTests extends ESTestCase { public void testSortNonListField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - ingestDocument.setFieldValue(fieldName, randomAsciiOfLengthBetween(1, 10)); + ingestDocument.setFieldValue(fieldName, randomAlphaOfLengthBetween(1, 10)); SortOrder order = randomBoolean() ? SortOrder.ASCENDING : SortOrder.DESCENDING; - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -256,7 +256,7 @@ public class SortProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); SortOrder order = randomBoolean() ? SortOrder.ASCENDING : SortOrder.DESCENDING; - Processor processor = new SortProcessor(randomAsciiOfLength(10), fieldName, order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), fieldName, order); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { @@ -267,7 +267,7 @@ public class SortProcessorTests extends ESTestCase { public void testSortNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); SortOrder order = randomBoolean() ? 
SortOrder.ASCENDING : SortOrder.DESCENDING; - Processor processor = new SortProcessor(randomAsciiOfLength(10), "field", order); + Processor processor = new SortProcessor(randomAlphaOfLength(10), "field", order); try { processor.execute(ingestDocument); } catch(IllegalArgumentException e) { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java index 68580eb8f1d..b165dae39b2 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java @@ -34,7 +34,7 @@ public class SplitProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("separator", "\\."); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); SplitProcessor splitProcessor = factory.create(null, processorTag, config); assertThat(splitProcessor.getTag(), equalTo(processorTag)); assertThat(splitProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java index 253ddfd194b..257719f7f70 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java @@ -39,7 +39,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplit() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "127.0.0.1"); - Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\.", false); + Processor processor = new SplitProcessor(randomAlphaOfLength(10), fieldName, "\\.", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, List.class), equalTo(Arrays.asList("127", "0", "0", "1"))); } @@ -47,7 +47,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplitFieldNotFound() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\.", false); + Processor processor = new SplitProcessor(randomAlphaOfLength(10), fieldName, "\\.", false); try { processor.execute(ingestDocument); fail("split processor should have failed"); @@ -59,7 +59,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplitNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", null)); - Processor processor = new SplitProcessor(randomAsciiOfLength(10), "field", "\\.", false); + Processor processor = new SplitProcessor(randomAlphaOfLength(10), "field", "\\.", false); try { processor.execute(ingestDocument); fail("split processor should have failed"); @@ -73,7 +73,7 @@ public class SplitProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), 
Collections.singletonMap(fieldName, null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\.", true); + Processor processor = new SplitProcessor(randomAlphaOfLength(10), fieldName, "\\.", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -81,7 +81,7 @@ public class SplitProcessorTests extends ESTestCase { public void testSplitNonExistentWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new SplitProcessor(randomAsciiOfLength(10), "field", "\\.", true); + Processor processor = new SplitProcessor(randomAlphaOfLength(10), "field", "\\.", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -90,7 +90,7 @@ public class SplitProcessorTests extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, randomInt()); - Processor processor = new SplitProcessor(randomAsciiOfLength(10), fieldName, "\\.", false); + Processor processor = new SplitProcessor(randomAlphaOfLength(10), fieldName, "\\.", false); try { processor.execute(ingestDocument); fail("split processor should have failed"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java index bcc0dde9409..ee57724bfcd 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java @@ -34,7 +34,7 @@ public class TrimProcessorFactoryTests extends ESTestCase { TrimProcessor.Factory factory = new TrimProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); TrimProcessor uppercaseProcessor = (TrimProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); @@ -46,7 +46,7 @@ public class TrimProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); TrimProcessor uppercaseProcessor = (TrimProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java index ce21cf3e381..99b543b8fe1 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorTests.java @@ -23,7 +23,7 @@ public class TrimProcessorTests extends 
AbstractStringProcessorTestCase { @Override protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing) { - return new TrimProcessor(randomAsciiOfLength(10), field, ignoreMissing); + return new TrimProcessor(randomAlphaOfLength(10), field, ignoreMissing); } @Override diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java index 0f834119510..7dbdf123298 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java @@ -33,7 +33,7 @@ public class UppercaseProcessorFactoryTests extends ESTestCase { UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "field1"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); UppercaseProcessor uppercaseProcessor = (UppercaseProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java index e52c240928d..2e4c6bf9ce9 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorTests.java @@ -25,7 +25,7 @@ public class UppercaseProcessorTests extends AbstractStringProcessorTestCase { @Override protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing) { - return new UppercaseProcessor(randomAsciiOfLength(10), field, ignoreMissing); + return new UppercaseProcessor(randomAlphaOfLength(10), field, ignoreMissing); } @Override diff --git a/modules/lang-expression/licenses/lucene-expressions-6.5.0-snapshot-d00c5ca.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index f2b1d1ddfef..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb201cc666e834f5f128cea00acdf2c046fcbb87 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.5.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.5.0.jar.sha1 new file mode 100644 index 00000000000..10514bb3d1f --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-6.5.0.jar.sha1 @@ -0,0 +1 @@ +5dfd44932fc77187a233a1cbf228c1a96ac8924f \ No newline at end of file diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index 5e9f6a5c91e..0d527590e47 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -377,7 +377,7 @@ public class MustacheTests extends ESTestCase { assertScript("{{#url}}{{index}}{{/url}}", singletonMap("index", ""), 
equalTo("%3Clogstash-%7Bnow%2Fd%7BYYYY.MM.dd%7C%2B12%3A00%7D%7D%3E")); - final String random = randomAsciiOfLength(10); + final String random = randomAlphaOfLength(10); assertScript("{{#url}}prefix_{{s}}{{/url}}", singletonMap("s", random), equalTo("prefix_" + URLEncoder.encode(random, StandardCharsets.UTF_8.name()))); } diff --git a/modules/lang-painless/ant.xml b/modules/lang-painless/ant.xml deleted file mode 100644 index 90e66b7b1a9..00000000000 --- a/modules/lang-painless/ant.xml +++ /dev/null @@ -1,157 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index c9e013d8c6d..31b41261b3a 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -33,20 +33,6 @@ dependencyLicenses { mapping from: /asm-.*/, to: 'asm' } -// regeneration logic, comes in via ant right now -// don't port it to gradle, it works fine. - -configurations { - regenerate -} - -dependencies { - regenerate 'org.antlr:antlr4:4.5.1-1' -} - -ant.references['regenerate.classpath'] = new Path(ant.project, configurations.regenerate.asPath) -ant.importBuild 'ant.xml' - integTestCluster { setting 'script.max_compilations_per_minute', '1000' } @@ -70,3 +56,95 @@ task generatePainlessApi(type: JavaExec) { classpath = sourceSets.test.runtimeClasspath args file('../../docs/reference/painless-api-reference') } + +/********************************************** + * Parser regeneration * + **********************************************/ + +configurations { + regenerate +} + +dependencies { + regenerate 'org.antlr:antlr4:4.5.1-1' +} + +String grammarPath = 'src/main/antlr' +String outputPath = 'src/main/java/org/elasticsearch/painless/antlr' + +task cleanGenerated(type: Delete) { + delete fileTree(grammarPath) { + include '*.tokens' + } + delete fileTree(outputPath) { + include 'Painless*.java' + } +} + +task regenLexer(type: JavaExec) { + dependsOn cleanGenerated + main = 'org.antlr.v4.Tool' + classpath = configurations.regenerate + systemProperty 'file.encoding', 'UTF-8' + systemProperty 'user.language', 'en' + systemProperty 'user.country', 'US' + systemProperty 'user.variant', '' + args '-Werror', + '-package', 'org.elasticsearch.painless.antlr', + '-o', outputPath, + "${file(grammarPath)}/PainlessLexer.g4" +} + +task regenParser(type: JavaExec) { + dependsOn regenLexer + main = 'org.antlr.v4.Tool' + classpath = configurations.regenerate + systemProperty 'file.encoding', 'UTF-8' + systemProperty 'user.language', 'en' + systemProperty 'user.country', 'US' + systemProperty 'user.variant', '' + args '-Werror', + '-package', 'org.elasticsearch.painless.antlr', + '-no-listener', + '-visitor', + // '-Xlog', + '-o', outputPath, + "${file(grammarPath)}/PainlessParser.g4" +} + +task regen { + dependsOn regenParser + doLast { + // moves token files to grammar directory for use with IDE's + ant.move(file: "${outputPath}/PainlessLexer.tokens", toDir: grammarPath) + ant.move(file: "${outputPath}/PainlessParser.tokens", toDir: grammarPath) + // make the generated classes package private + ant.replaceregexp(match: 'public ((interface|class) \\QPainless\\E\\w+)', + replace: '\\1', + encoding: 'UTF-8') { + fileset(dir: outputPath, includes: 'Painless*.java') + } + // make the lexer 
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java
index 2455af32528..fe95e8c8c23 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java
@@ -62,7 +62,7 @@ public class ImplementInterfacesTests extends ScriptTestCase {
     public void testOneArg() {
         Object rando = randomInt();
         assertEquals(rando, scriptEngine.compile(OneArg.class, null, "arg", emptyMap()).execute(rando));
-        rando = randomAsciiOfLength(5);
+        rando = randomAlphaOfLength(5);
         assertEquals(rando, scriptEngine.compile(OneArg.class, null, "arg", emptyMap()).execute(rando));
         Exception e = expectScriptThrows(IllegalArgumentException.class, () ->
@@ -79,7 +79,7 @@ public class ImplementInterfacesTests extends ScriptTestCase {
         Object execute(String[] arg);
     }
     public void testArrayArg() {
-        String rando = randomAsciiOfLength(5);
+        String rando = randomAlphaOfLength(5);
         assertEquals(rando, scriptEngine.compile(ArrayArg.class, null, "arg[0]", emptyMap()).execute(new String[] {rando, "foo"}));
     }
@@ -99,7 +99,7 @@ public class ImplementInterfacesTests extends ScriptTestCase {
     public void testDefArrayArg() {
         Object rando = randomInt();
         assertEquals(rando, scriptEngine.compile(DefArrayArg.class, null, "arg[0]", emptyMap()).execute(new Object[] {rando, 10}));
-        rando = randomAsciiOfLength(5);
+        rando = randomAlphaOfLength(5);
         assertEquals(rando, scriptEngine.compile(DefArrayArg.class, null, "arg[0]", emptyMap()).execute(new Object[] {rando, 10}));
         assertEquals(5, scriptEngine.compile(DefArrayArg.class, null, "arg[0].length()", emptyMap()).execute(new Object[] {rando, 10}));
     }
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java
index 23f3fc77c20..a4742218058 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java
@@ -85,8 +85,8 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase
[gap: the body of this hunk, and the header of the QueryAnalyzerTests diff that follows, were lost in extraction]
         while (sumTermLength > 0) {
             int length = randomInt(sumTermLength);
             shortestTerms1Length = Math.min(shortestTerms1Length, length);
-            terms1.add(new Term("field", randomAsciiOfLength(length)));
+            terms1.add(new Term("field", randomAlphaOfLength(length)));
             sumTermLength -= length;
         }
@@ -546,7 +546,7 @@ public class QueryAnalyzerTests extends ESTestCase {
         while (sumTermLength > 0) {
             int length = randomInt(sumTermLength);
             shortestTerms2Length = Math.min(shortestTerms2Length, length);
-            terms2.add(new Term("field", randomAsciiOfLength(length)));
+            terms2.add(new Term("field", randomAlphaOfLength(length)));
             sumTermLength -= length;
         }
terms2.add(new Term("field", randomAsciiOfLength(length))); + terms2.add(new Term("field", randomAlphaOfLength(length))); sumTermLength -= length; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java index a6afa6df39d..4804818890e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -171,7 +171,11 @@ final class RemoteRequestBuilders { return singletonMap("scroll", keepAlive.toString()); } - static HttpEntity scrollEntity(String scroll) { + static HttpEntity scrollEntity(String scroll, Version remoteVersion) { + if (remoteVersion.before(Version.V_2_0_0)) { + // Versions before 2.0.0 extract the plain scroll_id from the body + return new StringEntity(scroll, ContentType.TEXT_PLAIN); + } try (XContentBuilder entity = JsonXContent.contentBuilder()) { return new StringEntity(entity.startObject() .field("scroll_id", scroll) @@ -181,7 +185,11 @@ final class RemoteRequestBuilders { } } - static HttpEntity clearScrollEntity(String scroll) { + static HttpEntity clearScrollEntity(String scroll, Version remoteVersion) { + if (remoteVersion.before(Version.V_2_0_0)) { + // Versions before 2.0.0 extract the plain scroll_id from the body + return new StringEntity(scroll, ContentType.TEXT_PLAIN); + } try (XContentBuilder entity = JsonXContent.contentBuilder()) { return new StringEntity(entity.startObject() .array("scroll_id", scroll) diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java index 6781da64972..974fd9438d2 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -107,12 +107,12 @@ public class RemoteScrollableHitSource extends ScrollableHitSource { @Override protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse) { execute("POST", scrollPath(), scrollParams(timeValueNanos(searchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos())), - scrollEntity(scrollId), RESPONSE_PARSER, onResponse); + scrollEntity(scrollId, remoteVersion), RESPONSE_PARSER, onResponse); } @Override protected void clearScroll(String scrollId, Runnable onCompletion) { - client.performRequestAsync("DELETE", scrollPath(), emptyMap(), clearScrollEntity(scrollId), new ResponseListener() { + client.performRequestAsync("DELETE", scrollPath(), emptyMap(), clearScrollEntity(scrollId, remoteVersion), new ResponseListener() { @Override public void onSuccess(org.elasticsearch.client.Response response) { logger.debug("Successfully cleared [{}]", scrollId); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java index d33d8b2c877..791277ad270 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java @@ -43,13 +43,13 @@ public class 
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java
index d33d8b2c877..791277ad270 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseTests.java
@@ -43,13 +43,13 @@ public class BulkIndexByScrollResponseTests extends ESTestCase {
         List allBulkFailures = new ArrayList<>();
         List allSearchFailures = new ArrayList<>();
         boolean timedOut = false;
-        String reasonCancelled = rarely() ? randomAsciiOfLength(5) : null;
+        String reasonCancelled = rarely() ? randomAlphaOfLength(5) : null;
         for (int i = 0; i < mergeCount; i++) {
             // One of the merged responses gets the expected value for took, the others get a smaller value
             TimeValue thisTook = timeValueMillis(i == tookIndex ? took : between(0, took));
             // The actual status doesn't matter too much - we test merging those elsewhere
-            String thisReasonCancelled = rarely() ? randomAsciiOfLength(5) : null;
+            String thisReasonCancelled = rarely() ? randomAlphaOfLength(5) : null;
             BulkByScrollTask.Status status = new BulkByScrollTask.Status(i, 0, 0, 0, 0, 0, 0, 0, 0, 0, timeValueMillis(0), 0f,
                 thisReasonCancelled, timeValueMillis(0));
             List bulkFailures = frequently() ? emptyList()
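The CancelTests change below replaces an unbounded future.get() with a bounded wait: a reindex task that never observes its cancellation now fails after 30 seconds instead of hanging the suite, and the still-running tasks under the parent task are dumped into the exception message. The pattern, extracted (a sketch; dumpRunningTasks is a hypothetical stand-in for the prepareListTasks call in the real test):

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.TimeUnit;

    final class BoundedWaitSketch {
        static String dumpRunningTasks() {
            return "<task listing>"; // placeholder for the ListTasks API call
        }

        static <T> T getWithDiagnostics(CompletableFuture<T> future) {
            try {
                // Bounded wait: fail loudly instead of hanging the test run.
                return future.get(30, TimeUnit.SECONDS);
            } catch (Exception e) {
                throw new RuntimeException("Exception while waiting for the response. Running tasks: "
                        + dumpRunningTasks(), e);
            }
        }
    }

Attaching the task dump to the thrown exception is what makes the timeout actionable: the test report shows which task failed to cancel.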
Running tasks: " + tasks, e); + } assertThat(response.getReasonCancelled(), equalTo("by user request")); assertThat(response.getBulkFailures(), emptyIterable()); assertThat(response.getSearchFailures(), emptyIterable()); @@ -216,9 +224,10 @@ public class CancelTests extends ReindexTestCase { } public void testDeleteByQueryCancel() throws Exception { - testCancel(DeleteByQueryAction.NAME, deleteByQuery().source(INDEX), (response, total, modified) -> { - assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request"))); - assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), total - modified); + testCancel(DeleteByQueryAction.NAME, deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()), + (response, total, modified) -> { + assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request"))); + assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), total - modified); }, equalTo("delete-by-query [" + INDEX + "]")); } @@ -250,9 +259,10 @@ public class CancelTests extends ReindexTestCase { } public void testDeleteByQueryCancelWithWorkers() throws Exception { - testCancel(DeleteByQueryAction.NAME, deleteByQuery().source(INDEX).setSlices(5), (response, total, modified) -> { - assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); - assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), total - modified); + testCancel(DeleteByQueryAction.NAME, deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()).setSlices(5), + (response, total, modified) -> { + assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); + assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), total - modified); }, equalTo("delete-by-query [" + INDEX + "]")); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java index 4d920600a5d..f0eba7e9fa6 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryBasicTests.java @@ -56,7 +56,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5); // Deletes the two first docs with limit by size - DeleteByQueryRequestBuilder request = deleteByQuery().source("test").size(2).refresh(true); + DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).size(2).refresh(true); request.source().addSort("foo.keyword", SortOrder.ASC); assertThat(request.get(), matcher().deleted(2)); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3); @@ -66,7 +66,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3); // Deletes all remaining docs - assertThat(deleteByQuery().source("test").refresh(true).get(), matcher().deleted(3)); + assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(3)); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0); } @@ -79,7 +79,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { } indexRandom(true, true, true, builders); 
- assertThat(deleteByQuery().source("t*").refresh(true).get(), matcher().deleted(docs)); + assertThat(deleteByQuery().source("t*").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(docs)); assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); } @@ -122,7 +122,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { assertHitCount(client().prepareSearch().setSize(0).get(), 1); try { - deleteByQuery().source("missing").get(); + deleteByQuery().source("missing").filter(QueryBuilders.matchAllQuery()).get(); fail("should have thrown an exception because of a missing index"); } catch (IndexNotFoundException e) { // Ok @@ -151,7 +151,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { long expected = client().prepareSearch().setSize(0).setRouting(routing).get().getHits().getTotalHits(); logger.info("--> delete all documents with routing [{}] with a delete-by-query", routing); - DeleteByQueryRequestBuilder delete = deleteByQuery().source("test"); + DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()); delete.source().setRouting(routing); assertThat(delete.refresh(true).get(), matcher().deleted(expected)); @@ -166,7 +166,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { List builders = new ArrayList<>(); for (int i = 0; i < docs; i++) { builders.add(client().prepareIndex("test", "test", Integer.toString(i)) - .setRouting(randomAsciiOfLengthBetween(1, 5)) + .setRouting(randomAlphaOfLengthBetween(1, 5)) .setSource("foo", "bar")); } indexRandom(true, true, true, builders); @@ -202,7 +202,8 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { try { enableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); - assertThat(deleteByQuery().source("test").refresh(true).get(), matcher().deleted(0).failures(docs)); + assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), + matcher().deleted(0).failures(docs)); } finally { disableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); } @@ -228,7 +229,8 @@ public class DeleteByQueryBasicTests extends ReindexTestCase { assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5); // Delete remaining docs - DeleteByQueryRequestBuilder request = deleteByQuery().source("test").refresh(true).setSlices(5); + DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true) + .setSlices(5); assertThat(request.get(), matcher().deleted(5).slices(hasSize(5))); assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java index 1a22fe53ef4..1a25e4e5e9c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -50,7 +50,7 @@ public class ReindexFromRemoteWhitelistTests extends ESTestCase { * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything. 
*/ private RemoteInfo newRemoteInfo(String host, int port) { - return new RemoteInfo(randomAsciiOfLength(5), host, port, new BytesArray("test"), null, null, emptyMap(), + return new RemoteInfo(randomAlphaOfLength(5), host, port, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } @@ -64,7 +64,7 @@ public class ReindexFromRemoteWhitelistTests extends ESTestCase { public void testWhitelistedByPrefix() { checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), - new RemoteInfo(randomAsciiOfLength(5), "es.example.com", 9200, new BytesArray("test"), null, null, emptyMap(), + new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT)); checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200)); @@ -114,7 +114,7 @@ public class ReindexFromRemoteWhitelistTests extends ESTestCase { int size = between(1, 100); List whitelist = new ArrayList<>(size); for (int i = 0; i < size; i++) { - whitelist.add(randomAsciiOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); + whitelist.add(randomAlphaOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); } return whitelist; } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java index e0155ca6cab..c2b07239671 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -40,7 +40,7 @@ public class ReindexRequestTests extends AbstractBulkByScrollRequestTestCase request, String actionName) throws Exception { logger.info("Starting test for [{}] with [{}] slices", actionName, request.request().getSlices()); /* Add ten documents per slice so most slices will have many documents to process, having to go to multiple batches. - * we can't rely on all of them doing so, but + * we can't rely on all of them doing so, but */ List docs = new ArrayList<>(); for (int i = 0; i < request.request().getSlices() * 10; i++) { @@ -158,7 +159,7 @@ public class RethrottleTests extends ReindexTestCase { * are rethrottled, the finished ones just keep whatever requests per second they had while they were running. But it might * also be less than newRequestsPerSecond because the newRequestsPerSecond is divided among running sub-requests and then the * requests are rethrottled. If one request finishes in between the division and the application of the new throttle then it - * won't be rethrottled, thus only contributing its lower total. */ + * won't be rethrottled, thus only contributing its lower total. 
*/ assertEquals(totalRequestsPerSecond, status.getRequestsPerSecond(), totalRequestsPerSecond * 0.0001f); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index e94dcfeb122..6e8da59eee3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -35,6 +35,8 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -134,8 +136,8 @@ public class RetryTests extends ESSingleNodeTestCase { } public void testDeleteByQuery() throws Exception { - testCase(DeleteByQueryAction.NAME, DeleteByQueryAction.INSTANCE.newRequestBuilder(client()).source("source"), - matcher().deleted(DOC_COUNT)); + testCase(DeleteByQueryAction.NAME, DeleteByQueryAction.INSTANCE.newRequestBuilder(client()).source("source") + .filter(QueryBuilders.matchAllQuery()), matcher().deleted(DOC_COUNT)); } private void testCase(String action, AbstractBulkByScrollRequestBuilder request, BulkIndexByScrollResponseMatcher matcher) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index abc0d6e1f35..5793db023f0 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -56,17 +56,17 @@ public class RoundTripTests extends ESTestCase { reindex.getDestination().index("test"); if (randomBoolean()) { int port = between(1, Integer.MAX_VALUE); - BytesReference query = new BytesArray(randomAsciiOfLength(5)); - String username = randomBoolean() ? randomAsciiOfLength(5) : null; - String password = username != null && randomBoolean() ? randomAsciiOfLength(5) : null; + BytesReference query = new BytesArray(randomAlphaOfLength(5)); + String username = randomBoolean() ? randomAlphaOfLength(5) : null; + String password = username != null && randomBoolean() ? randomAlphaOfLength(5) : null; int headersCount = randomBoolean() ? 
0 : between(1, 10); Map headers = new HashMap<>(headersCount); while (headers.size() < headersCount) { - headers.put(randomAsciiOfLength(5), randomAsciiOfLength(5)); + headers.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); } TimeValue socketTimeout = parseTimeValue(randomPositiveTimeValue(), "socketTimeout"); TimeValue connectTimeout = parseTimeValue(randomPositiveTimeValue(), "connectTimeout"); - reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), port, query, username, password, headers, + reindex.setRemoteInfo(new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, query, username, password, headers, socketTimeout, connectTimeout)); } ReindexRequest tripped = new ReindexRequest(); @@ -90,7 +90,7 @@ public class RoundTripTests extends ESTestCase { UpdateByQueryRequest update = new UpdateByQueryRequest(new SearchRequest()); randomRequest(update); if (randomBoolean()) { - update.setPipeline(randomAsciiOfLength(5)); + update.setPipeline(randomAlphaOfLength(5)); } UpdateByQueryRequest tripped = new UpdateByQueryRequest(); roundTrip(update, tripped); @@ -196,7 +196,7 @@ public class RoundTripTests extends ESTestCase { if (randomBoolean()) { request.setActions(randomFrom(UpdateByQueryAction.NAME, ReindexAction.NAME)); } else { - request.setTaskId(new TaskId(randomAsciiOfLength(5), randomLong())); + request.setTaskId(new TaskId(randomAlphaOfLength(5), randomLong())); } RethrottleRequest tripped = new RethrottleRequest(); roundTrip(request, tripped); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/TransportRethrottleActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/TransportRethrottleActionTests.java index 566358f4594..14ae9b5abb8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/TransportRethrottleActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/TransportRethrottleActionTests.java @@ -68,7 +68,7 @@ public class TransportRethrottleActionTests extends ESTestCase { private void rethrottleTestCase(int runningSlices, Consumer> simulator, Consumer> verifier) { Client client = mock(Client.class); - String localNodeId = randomAsciiOfLength(5); + String localNodeId = randomAlphaOfLength(5); float newRequestsPerSecond = randomValueOtherThanMany(f -> f <= 0, () -> randomFloat()); @SuppressWarnings("unchecked") ActionListener listener = mock(ActionListener.class); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java index 9ecb0089187..c932e83ce1c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java @@ -62,16 +62,16 @@ public class UpdateByQueryRequestTests extends AbstractBulkByScrollRequestTestCa @Override protected UpdateByQueryRequest newRequest() { - return new UpdateByQueryRequest(new SearchRequest(randomAsciiOfLength(5))); + return new UpdateByQueryRequest(new SearchRequest(randomAlphaOfLength(5))); } @Override protected void extraRandomizationForSlice(UpdateByQueryRequest original) { if (randomBoolean()) { - original.setScript(new Script(randomAsciiOfLength(5))); + original.setScript(new Script(randomAlphaOfLength(5))); } if (randomBoolean()) { - original.setPipeline(randomAsciiOfLength(5)); + 
original.setPipeline(randomAlphaOfLength(5)); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java index f9ab72506a8..16f0c2a0a4e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -39,6 +39,7 @@ import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initi import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchParams; import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchPath; import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.clearScrollEntity; import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollParams; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; @@ -184,10 +185,28 @@ public class RemoteRequestBuildersTests extends ESTestCase { } public void testScrollEntity() throws IOException { - String scroll = randomAsciiOfLength(30); - HttpEntity entity = scrollEntity(scroll); + String scroll = randomAlphaOfLength(30); + HttpEntity entity = scrollEntity(scroll, Version.V_5_0_0); assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); assertThat(Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)), containsString("\"" + scroll + "\"")); + + // Test with version < 2.0.0 + entity = scrollEntity(scroll, Version.fromId(1070499)); + assertEquals(ContentType.TEXT_PLAIN.toString(), entity.getContentType().getValue()); + assertEquals(scroll, Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + } + + public void testClearScrollEntity() throws IOException { + String scroll = randomAlphaOfLength(30); + HttpEntity entity = clearScrollEntity(scroll, Version.V_5_0_0); + assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); + assertThat(Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)), + containsString("\"" + scroll + "\"")); + + // Test with version < 2.0.0 + entity = clearScrollEntity(scroll, Version.fromId(1070499)); + assertEquals(ContentType.TEXT_PLAIN.toString(), entity.getContentType().getValue()); + assertEquals(scroll, Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index eb7abea6af5..2a67306425c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -370,7 +370,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { } public void testThreadContextRestored() throws Exception { - String header = randomAsciiOfLength(5); + String header = randomAlphaOfLength(5); threadPool.getThreadContext().putHeader("test", header); AtomicBoolean called = new 
AtomicBoolean(); sourceWithMockedRemoteCall("start_ok.json").doStart(r -> { diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml index 4aa63facc24..7527db94842 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml @@ -5,6 +5,19 @@ delete_by_query: index: _all +--- +"no query fails": + + - skip: + version: " - 5.99.99" + reason: explicit query is required since 6.0.0 + + - do: + catch: /query is missing/ + delete_by_query: + index: _all + body: {} + --- "invalid conflicts fails": - do: diff --git a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/URLBlobStoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/URLBlobStoreTests.java index 4df83c58bd3..5489eff71f4 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/URLBlobStoreTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/URLBlobStoreTests.java @@ -52,7 +52,7 @@ public class URLBlobStoreTests extends ESTestCase { for (int i = 0; i < message.length; ++i) { message[i] = randomByte(); } - blobName = randomAsciiOfLength(8); + blobName = randomAlphaOfLength(8); httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 6001), 0); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 439827fdb71..4b515aab869 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -39,6 +39,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.bytes.BytesReference; @@ -73,8 +74,10 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import static org.elasticsearch.common.settings.Setting.byteSizeSetting; import static org.elasticsearch.common.settings.Setting.intSetting; @@ -393,9 +396,20 @@ public class Netty4Transport extends TcpTransport { } @Override - protected void sendMessage(Channel channel, BytesReference reference, Runnable sendListener) { + protected void sendMessage(Channel channel, BytesReference reference, ActionListener<Channel> listener) { final ChannelFuture future = channel.writeAndFlush(Netty4Utils.toByteBuf(reference)); - future.addListener(f -> sendListener.run()); + future.addListener(f -> { + if (f.isSuccess()) { + listener.onResponse(channel); + } else { + Throwable cause = f.cause(); + // If the Throwable is an Error, something has gone very wrong and Netty4MessageChannelHandler is + // going to cause that to bubble up and 
kill the process. + if (cause instanceof Exception) { + listener.onFailure((Exception) cause); + } + } + }); } @Override diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0-snapshot-d00c5ca.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 12314f68819..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -165f826617aa6cb7af67b2c3f87df3b46216a155 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0.jar.sha1 new file mode 100644 index 00000000000..95df77a7521 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0.jar.sha1 @@ -0,0 +1 @@ +3a71465f63887f871bc377d87a0838c29b0a857d \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0-snapshot-d00c5ca.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 8e844e3ad9b..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -50ed8c505a120bfcd1d5a7d3fae837027153f0dd \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0.jar.sha1 new file mode 100644 index 00000000000..0c928699fc6 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0.jar.sha1 @@ -0,0 +1 @@ +03353b0d030f6d5a63c4c0d5b64c770f5ba9d829 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0-snapshot-d00c5ca.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index b1573a888d7..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f4c04ecad541aa9526c4e2bd4e98aa08898ffa1c \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0.jar.sha1 new file mode 100644 index 00000000000..ba2bee28476 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0.jar.sha1 @@ -0,0 +1 @@ +77ce4fb8c62688d8a094f08a07685c464ec46345 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0-snapshot-d00c5ca.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index 9f1ab3b052c..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bc5ca65f0db1ec9f71481c6ad4e146bbf56df32e \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0.jar.sha1 new file mode 100644 index 00000000000..0a0ae4cf401 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0.jar.sha1 @@ -0,0 +1 @@ +60a780d900e48b0cead42d82fe405ad54bd658c3 \ No newline at end of file diff --git 
a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0-snapshot-d00c5ca.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index ac599127442..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dae2a3e6b79197d4e48ee1ae8d0ef31b8b20069e \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0.jar.sha1 new file mode 100644 index 00000000000..9a1387fa22f --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0.jar.sha1 @@ -0,0 +1 @@ +894c42c011d291e72d14db660499c75281de9efd \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0-snapshot-d00c5ca.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0-snapshot-d00c5ca.jar.sha1 deleted file mode 100644 index fc2ac0b8a2f..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0-snapshot-d00c5ca.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -318fcd0d1d33d45088ac3f4ab8291a4a22060078 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0.jar.sha1 new file mode 100644 index 00000000000..89a0283d52e --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0.jar.sha1 @@ -0,0 +1 @@ +72f0172cf947ab563a7c8166855cf7cbdfe33136 \ No newline at end of file diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java index fc0234671f0..ad33f8ec218 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java @@ -96,7 +96,6 @@ public class GceDiscoverTests extends ESIntegTestCase { .put("node.portsfile", "true") .put("cloud.gce.project_id", "testproject") .put("cloud.gce.zone", "primaryzone") - .put("discovery.initial_state_timeout", "1s") .put("cloud.gce.host", "http://" + httpServer.getAddress().getHostName() + ":" + httpServer.getAddress().getPort()) .put("cloud.gce.root_url", "https://" + httpsServer.getAddress().getHostName() + ":" + httpsServer.getAddress().getPort()) diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java index 376214eb173..c5f2b0d7fbd 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.attachment; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -45,7 +44,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); - String processorTag = randomAsciiOfLength(10); + 
String processorTag = randomAlphaOfLength(10); AttachmentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); @@ -61,7 +60,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("indexed_chars", indexedChars); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); AttachmentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getIndexedChars(), is(indexedChars)); @@ -127,7 +126,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); AttachmentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index e5b9d72017f..07e36998532 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -42,7 +42,6 @@ import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocumen import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -56,7 +55,7 @@ public class AttachmentProcessorTests extends ESTestCase { @Before public void createStandardProcessor() throws IOException { - processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", + processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 10000, false); } @@ -89,7 +88,7 @@ public class AttachmentProcessorTests extends ESTestCase { if (randomBoolean()) { selectedProperties.add(AttachmentProcessor.Property.DATE); } - processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", + processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "target_field", selectedProperties, 10000, false); Map attachmentData = parseDocument("htmlWithEmptyDateMeta.html", processor); @@ -243,7 +242,7 @@ public class AttachmentProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", "randomTarget", null, 10, true); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -251,7 +250,7 @@ public class AttachmentProcessorTests extends ESTestCase { public void testNonExistentWithIgnoreMissing() throws Exception { IngestDocument 
originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", "randomTarget", null, 10, true); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -260,7 +259,7 @@ public class AttachmentProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", "randomTarget", null, 10, false); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, false); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [source_field] is null, cannot parse.")); } @@ -268,7 +267,7 @@ public class AttachmentProcessorTests extends ESTestCase { public void testNonExistentWithoutIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", "randomTarget", null, 10, false); + Processor processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "randomTarget", null, 10, false); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [source_field] not present as part of path [source_field]")); } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 8db0d15f796..3904b043a52 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -76,7 +76,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); GeoIpProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); @@ -93,7 +93,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); GeoIpProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); @@ -110,7 +110,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb.gz"); - String processorTag = randomAsciiOfLength(10); + String processorTag = 
randomAlphaOfLength(10); GeoIpProcessor processor = factory.create(null, processorTag, config); diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 221c06d3b68..54c504ca5c5 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; -import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.test.ESTestCase; @@ -42,7 +41,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCity() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); @@ -69,7 +68,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testNullValueWithIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); @@ -80,7 +79,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testNonExistentWithIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); @@ -90,7 +89,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testNullWithoutIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); @@ -101,7 +100,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testNonExistentWithoutIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - 
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); @@ -111,7 +110,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCity_withIpV6() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); String address = "2602:306:33d3:8000::3257:9652"; @@ -139,7 +138,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCityWithMissingLocation() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); @@ -156,7 +155,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCountry() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); @@ -176,7 +175,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCountryWithMissingLocation() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); @@ -193,7 +192,7 @@ public class GeoIpProcessorTests extends ESTestCase { public void testAddressIsNotInTheDatabase() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); @@ -206,7 +205,7 @@ public class GeoIpProcessorTests extends ESTestCase { /** Don't silently do DNS lookups or anything trappy on bogus data */ public void testInvalid() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb.gz"); - GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", + GeoIpProcessor 
processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); Map document = new HashMap<>(); diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java index c1c46283076..4e0d0fb3695 100644 --- a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java @@ -79,7 +79,7 @@ public class UserAgentProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); UserAgentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); @@ -99,7 +99,7 @@ public class UserAgentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("ignore_missing", true); - String processorTag = randomAsciiOfLength(10); + String processorTag = randomAlphaOfLength(10); UserAgentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java index 2c9e72ae9d9..92e94434ccd 100644 --- a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.useragent; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.useragent.UserAgentProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; @@ -46,14 +45,14 @@ public class UserAgentProcessorTests extends ESTestCase { InputStream regexStream = UserAgentProcessor.class.getResourceAsStream("/regexes.yaml"); assertNotNull(regexStream); - UserAgentParser parser = new UserAgentParser(randomAsciiOfLength(10), regexStream, new UserAgentCache(1000)); + UserAgentParser parser = new UserAgentParser(randomAlphaOfLength(10), regexStream, new UserAgentCache(1000)); - processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", parser, + processor = new UserAgentProcessor(randomAlphaOfLength(10), "source_field", "target_field", parser, EnumSet.allOf(UserAgentProcessor.Property.class), false); } public void testNullValueWithIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), "source_field", "target_field", null, EnumSet.allOf(UserAgentProcessor.Property.class), true); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); @@ -63,7 +62,7 @@ public class UserAgentProcessorTests extends ESTestCase { } public void 
testNonExistentWithIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), "source_field", "target_field", null, EnumSet.allOf(UserAgentProcessor.Property.class), true); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); @@ -72,7 +71,7 @@ public class UserAgentProcessorTests extends ESTestCase { } public void testNullWithoutIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), "source_field", "target_field", null, EnumSet.allOf(UserAgentProcessor.Property.class), false); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); @@ -82,7 +81,7 @@ public class UserAgentProcessorTests extends ESTestCase { } public void testNonExistentWithoutIgnoreMissing() throws Exception { - UserAgentProcessor processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", null, + UserAgentProcessor processor = new UserAgentProcessor(randomAlphaOfLength(10), "source_field", "target_field", null, EnumSet.allOf(UserAgentProcessor.Property.class), false); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index a2c3df17d80..14a0c37a200 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -23,8 +23,9 @@ esplugin { } dependencies { - compile 'com.microsoft.azure:azure-storage:4.0.0' - compile 'org.apache.commons:commons-lang3:3.3.2' + compile 'com.microsoft.azure:azure-storage:5.0.0' + compile 'com.microsoft.azure:azure-keyvault-core:0.8.0' + compile 'org.apache.commons:commons-lang3:3.4' } dependencyLicenses { diff --git a/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 b/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 new file mode 100644 index 00000000000..b86c58db842 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-keyvault-core-0.8.0.jar.sha1 @@ -0,0 +1 @@ +35f7ac687462f491d0f8b0d96733dfe347493d70 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-4.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-4.0.0.jar.sha1 deleted file mode 100644 index 9ef89531d6d..00000000000 --- a/plugins/repository-azure/licenses/azure-storage-4.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b31504f0fb3f9c4458ad053b426357a9b0df6e08 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 new file mode 100644 index 00000000000..9882cb80204 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-5.0.0.jar.sha1 @@ -0,0 +1 @@ +ba8f04bfeac08016c0f88423a202d0f3aac03aed \ No newline at end of file diff --git a/plugins/repository-azure/licenses/commons-lang3-3.3.2.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.3.2.jar.sha1 deleted 
file mode 100644 index bdd913cf235..00000000000 --- a/plugins/repository-azure/licenses/commons-lang3-3.3.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -90a3822c38ec8c996e84c16a3477ef632cbc87a3 diff --git a/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 new file mode 100644 index 00000000000..fdd7040377b --- /dev/null +++ b/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 @@ -0,0 +1 @@ +5fe28b9518e58819180a43a850fbc0dd24b7c050 \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java index 73b0f07835c..0f32180860c 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java @@ -39,7 +39,6 @@ import java.net.URISyntaxException; import java.util.Locale; import java.util.Map; -import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getValue; import static org.elasticsearch.repositories.azure.AzureRepository.Repository; public class AzureBlobStore extends AbstractComponent implements BlobStore { @@ -55,11 +54,11 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { AzureStorageService client) throws URISyntaxException, StorageException { super(settings); this.client = client; - this.container = getValue(metadata.settings(), settings, Repository.CONTAINER_SETTING, Storage.CONTAINER_SETTING); + this.container = Repository.CONTAINER_SETTING.get(metadata.settings()); this.repositoryName = metadata.name(); - this.accountName = getValue(metadata.settings(), settings, Repository.ACCOUNT_SETTING, Storage.ACCOUNT_SETTING); + this.accountName = Repository.ACCOUNT_SETTING.get(metadata.settings()); - String modeStr = getValue(metadata.settings(), settings, Repository.LOCATION_MODE_SETTING, Storage.LOCATION_MODE_SETTING); + String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); if (Strings.hasLength(modeStr)) { this.locMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); } else { diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 6343541aed3..5b6575a4d14 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -21,7 +21,6 @@ package org.elasticsearch.cloud.azure.storage; import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; - import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -35,7 +34,6 @@ import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; import java.util.Map; -import java.util.function.Function; /** * Azure Storage Service interface @@ -53,18 +51,6 @@ public interface AzureStorageService { public static final Setting TIMEOUT_SETTING = Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(-1), Property.NodeScope); - public static final Setting 
ACCOUNT_SETTING = - Setting.simpleString("repositories.azure.account", Property.NodeScope, Property.Filtered); - public static final Setting CONTAINER_SETTING = - new Setting<>("repositories.azure.container", "elasticsearch-snapshots", Function.identity(), Property.NodeScope); - public static final Setting BASE_PATH_SETTING = - Setting.simpleString("repositories.azure.base_path", Property.NodeScope); - public static final Setting LOCATION_MODE_SETTING = - Setting.simpleString("repositories.azure.location_mode", Property.NodeScope); - public static final Setting CHUNK_SIZE_SETTING = - Setting.byteSizeSetting("repositories.azure.chunk_size", MAX_CHUNK_SIZE, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE, Property.NodeScope); - public static final Setting COMPRESS_SETTING = - Setting.boolSetting("repositories.azure.compress", false, Property.NodeScope); } boolean doesContainerExist(String account, LocationMode mode, String container); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index e1eea1f57f4..594715b845c 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -21,6 +21,8 @@ package org.elasticsearch.cloud.azure.storage; import com.microsoft.azure.storage.CloudStorageAccount; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.RetryExponentialRetry; +import com.microsoft.azure.storage.RetryPolicy; import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.blob.BlobProperties; import com.microsoft.azure.storage.blob.CloudBlobClient; @@ -147,6 +149,11 @@ public class AzureStorageServiceImpl extends AbstractComponent implements AzureS "]. 
It can not be longer than 2,147,483,647ms."); } } + + // We define a default exponential retry policy + client.getDefaultRequestOptions().setRetryPolicyFactory( + new RetryExponentialRetry(RetryPolicy.DEFAULT_CLIENT_BACKOFF, azureStorageSettings.getMaxRetries())); + return client; } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index 5e0de46f65a..600d5fe97f8 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -19,20 +19,19 @@ package org.elasticsearch.cloud.azure.storage; +import com.microsoft.azure.storage.RetryPolicy; import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.node.Node; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Function; public final class AzureStorageSettings { private static final Setting TIMEOUT_SETTING = Setting.affixKeySetting(Storage.PREFIX, "timeout", @@ -43,20 +42,27 @@ public final class AzureStorageSettings { Setting.affixKeySetting(Storage.PREFIX, "key", (key) -> Setting.simpleString(key, Setting.Property.NodeScope)); private static final Setting DEFAULT_SETTING = Setting.affixKeySetting(Storage.PREFIX, "default", (key) -> Setting.boolSetting(key, false, Setting.Property.NodeScope)); - + /** + * max_retries: Number of retries in case of Azure errors. Defaults to 3 (RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT). 
+ */ + private static final Setting<Integer> MAX_RETRIES_SETTING = + Setting.affixKeySetting(Storage.PREFIX, "max_retries", + (key) -> Setting.intSetting(key, RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT, Setting.Property.NodeScope)); private final String name; private final String account; private final String key; private final TimeValue timeout; private final boolean activeByDefault; + private final int maxRetries; - public AzureStorageSettings(String name, String account, String key, TimeValue timeout, boolean activeByDefault) { + public AzureStorageSettings(String name, String account, String key, TimeValue timeout, boolean activeByDefault, int maxRetries) { this.name = name; this.account = account; this.key = key; this.timeout = timeout; this.activeByDefault = activeByDefault; + this.maxRetries = maxRetries; } public String getName() { @@ -79,6 +85,10 @@ public final class AzureStorageSettings { return activeByDefault; } + public int getMaxRetries() { + return maxRetries; + } + @Override public String toString() { final StringBuilder sb = new StringBuilder("AzureStorageSettings{"); @@ -87,6 +97,7 @@ public final class AzureStorageSettings { sb.append(", key='").append(key).append('\''); sb.append(", activeByDefault='").append(activeByDefault).append('\''); sb.append(", timeout=").append(timeout); + sb.append(", maxRetries=").append(maxRetries); sb.append('}'); return sb.toString(); } @@ -112,7 +123,8 @@ public final class AzureStorageSettings { getValue(settings, groupName, ACCOUNT_SETTING), getValue(settings, groupName, KEY_SETTING), getValue(settings, groupName, TIMEOUT_SETTING), - getValue(settings, groupName, DEFAULT_SETTING)) + getValue(settings, groupName, DEFAULT_SETTING), + getValue(settings, groupName, MAX_RETRIES_SETTING)) ); } return storageSettings; @@ -130,7 +142,8 @@ public final class AzureStorageSettings { } else if (settings.size() == 1) { // the only storage settings belong (implicitly) to the default primary storage AzureStorageSettings storage = settings.get(0); - return new AzureStorageSettings(storage.getName(), storage.getAccount(), storage.getKey(), storage.getTimeout(), true); + return new AzureStorageSettings(storage.getName(), storage.getAccount(), storage.getKey(), storage.getTimeout(), true, + storage.getMaxRetries()); } else { AzureStorageSettings primary = null; for (AzureStorageSettings setting : settings) { @@ -161,25 +174,4 @@ public final class AzureStorageSettings { } return Collections.unmodifiableMap(secondaries); } - - public static <T> T getValue(Settings repositorySettings, - Settings globalSettings, - Setting<T> repositorySetting, - Setting<T> repositoriesSetting) { - if (repositorySetting.exists(repositorySettings)) { - return repositorySetting.get(repositorySettings); - } else { - return repositoriesSetting.get(globalSettings); - } - } - - public static <T> Setting<T> getEffectiveSetting(Settings repositorySettings, - Setting<T> repositorySetting, - Setting<T> repositoriesSetting) { - if (repositorySetting.exists(repositorySettings)) { - return repositorySetting; - } else { - return repositoriesSetting; - } - } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java index f8953833074..bfed1fc254d 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java +++ 
b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java @@ -53,13 +53,7 @@ public class AzureRepositoryPlugin extends Plugin implements RepositoryPlugin { @Override public List<Setting<?>> getSettings() { - return Arrays.asList(AzureStorageService.Storage.STORAGE_ACCOUNTS, - AzureStorageService.Storage.ACCOUNT_SETTING, - AzureStorageService.Storage.COMPRESS_SETTING, - AzureStorageService.Storage.CONTAINER_SETTING, - AzureStorageService.Storage.BASE_PATH_SETTING, - AzureStorageService.Storage.CHUNK_SIZE_SETTING, - AzureStorageService.Storage.LOCATION_MODE_SETTING); + return Collections.singletonList(AzureStorageService.Storage.STORAGE_ACCOUNTS); } @Override diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 50d087e713d..2b99e6a6f8e 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -23,7 +23,6 @@ import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.blobstore.AzureBlobStore; import org.elasticsearch.cloud.azure.storage.AzureStorageService; -import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; @@ -46,7 +45,6 @@ import java.util.function.Function; import static org.elasticsearch.cloud.azure.storage.AzureStorageService.MAX_CHUNK_SIZE; import static org.elasticsearch.cloud.azure.storage.AzureStorageService.MIN_CHUNK_SIZE; -import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getValue; /** * Azure file system implementation of the BlobStoreRepository @@ -86,10 +84,10 @@ public class AzureRepository extends BlobStoreRepository { super(metadata, environment.settings(), namedXContentRegistry); blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); - String container = getValue(metadata.settings(), settings, Repository.CONTAINER_SETTING, Storage.CONTAINER_SETTING); - this.chunkSize = getValue(metadata.settings(), settings, Repository.CHUNK_SIZE_SETTING, Storage.CHUNK_SIZE_SETTING); - this.compress = getValue(metadata.settings(), settings, Repository.COMPRESS_SETTING, Storage.COMPRESS_SETTING); - String modeStr = getValue(metadata.settings(), settings, Repository.LOCATION_MODE_SETTING, Storage.LOCATION_MODE_SETTING); + String container = Repository.CONTAINER_SETTING.get(metadata.settings()); + this.chunkSize = Repository.CHUNK_SIZE_SETTING.get(metadata.settings()); + this.compress = Repository.COMPRESS_SETTING.get(metadata.settings()); + String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); Boolean forcedReadonly = metadata.settings().getAsBoolean("readonly", null); // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. 
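Note: with the node-level `repositories.azure.*` fallbacks removed above, a repository now has to be registered with its settings inline; a hypothetical registration using only the setting names kept by this change could look like the sketch below (all values are illustrative).

    PUT _snapshot/my_azure_repository
    {
      "type": "azure",
      "settings": {
        "container": "my-container",
        "base_path": "snapshots",
        "chunk_size": "32mb",
        "compress": true
      }
    }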
// For secondary_only setting, the repository should be read only @@ -104,7 +102,7 @@ public class AzureRepository extends BlobStoreRepository { readonly = forcedReadonly; } - String basePath = getValue(metadata.settings(), settings, Repository.BASE_PATH_SETTING, Storage.BASE_PATH_SETTING); + String basePath = Repository.BASE_PATH_SETTING.get(metadata.settings()); if (Strings.hasLength(basePath)) { // Remove starting / if any diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTests.java index ba377c03c47..0452380ceb9 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.cloud.azure.storage; import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.RetryExponentialRetry; import com.microsoft.azure.storage.blob.CloudBlobClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -28,7 +29,9 @@ import java.net.URI; import java.net.URISyntaxException; import static org.elasticsearch.cloud.azure.storage.AzureStorageServiceImpl.blobNameFromUri; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class AzureStorageServiceTests extends ESTestCase { @@ -143,6 +146,31 @@ public class AzureStorageServiceTests extends ESTestCase { assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(nullValue())); } + public void testGetSelectedClientBackoffPolicy() { + Settings timeoutSettings = Settings.builder() + .put("cloud.azure.storage.azure.account", "myaccount") + .put("cloud.azure.storage.azure.key", "mykey") + .build(); + + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(timeoutSettings); + CloudBlobClient client1 = azureStorageService.getSelectedClient("azure", LocationMode.PRIMARY_ONLY); + assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue())); + assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class)); + } + + public void testGetSelectedClientBackoffPolicyNbRetries() { + Settings timeoutSettings = Settings.builder() + .put("cloud.azure.storage.azure.account", "myaccount") + .put("cloud.azure.storage.azure.key", "mykey") + .put("cloud.azure.storage.azure.max_retries", 7) + .build(); + + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(timeoutSettings); + CloudBlobClient client1 = azureStorageService.getSelectedClient("azure", LocationMode.PRIMARY_ONLY); + assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue())); + assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class)); + } + /** * This internal class just overload createClient method which is called by AzureStorageServiceImpl.doStart() */ diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java index 
091c92ffd31..f7c6fc2eac9 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -29,7 +29,7 @@ import java.util.Locale; public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContainerTestCase { @Override protected BlobStore newBlobStore() throws IOException { - String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockHttpTransport.newStorage(bucket, getTestName())); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java index b5489466b51..65add371855 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStoreTests.java @@ -30,7 +30,7 @@ public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase { @Override protected BlobStore newBlobStore() throws IOException { - String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, MockHttpTransport.newStorage(bucket, getTestName())); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java index 98dd43b6f4d..187ce9b23b4 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStoreContainerTests.java @@ -31,7 +31,7 @@ import java.util.Locale; public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { protected BlobStore newBlobStore() throws IOException { MockAmazonS3 client = new MockAmazonS3(); - String bucket = randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); return new S3BlobStore(Settings.EMPTY, client, bucket, false, new ByteSizeValue(10, ByteSizeUnit.MB), 5, "public-read-write", "standard"); diff --git a/qa/backwards-5.0/build.gradle b/qa/backwards-5.0/build.gradle index 90dd2f2d8bb..b68573ef389 100644 --- a/qa/backwards-5.0/build.gradle +++ b/qa/backwards-5.0/build.gradle @@ -20,19 +20,7 @@ apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-test' -/* This project runs the core REST tests against a 2 node cluster where one of the nodes has a different minor. - * Since we don't have a version to test against we currently use the hardcoded snapshot for to basically run - * against ourselves. 
To test that using a different version go to distribution/zip and execute: - * gradle clean publishToMavenLocal -Dbuild.snapshot=false - * - * This installs the release-build into a local .m2 repository, then change this version here to: - * bwcVersion = "5.0.0" - * - * now you can run the bwc tests with: - * gradle check -Drepos.mavenlocal=true - * - * (-Drepos.mavenlocal=true will force gradle to look for the zip distribution in the local .m2 repository) - */ +/* This project runs the core REST tests against a 2 node cluster where one of the nodes has a different minor. */ integTest { includePackaged = true } @@ -40,12 +28,7 @@ integTest { integTestCluster { numNodes = 4 numBwcNodes = 2 - bwcVersion = "5.4.0-SNAPSHOT" + bwcVersion = project.bwcVersion setting 'logger.org.elasticsearch', 'DEBUG' } -repositories { - maven { - url "https://oss.sonatype.org/content/repositories/snapshots/" - } -} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java index 79c3f9248b5..8e346bf7d9c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilBootstrapChecksTests.java @@ -92,7 +92,7 @@ public class EvilBootstrapChecksTests extends ESTestCase { } public void testInvalidValue() { - final String value = randomAsciiOfLength(8); + final String value = randomAlphaOfLength(8); setEsEnforceBootstrapChecks(value); final boolean enforceLimits = randomBoolean(); final IllegalArgumentException e = expectThrows( diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java index 2c00030973b..02caeca8089 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java @@ -33,7 +33,7 @@ public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { @SuppressForbidden(reason = "manipulates system properties for testing") public void testPathHome() throws Exception { final String pathHome = System.getProperty("es.path.home"); - final String value = randomAsciiOfLength(16); + final String value = randomAlphaOfLength(16); System.setProperty("es.path.home", value); runTest( @@ -48,7 +48,7 @@ public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { }); System.clearProperty("es.path.home"); - final String commandLineValue = randomAsciiOfLength(16); + final String commandLineValue = randomAlphaOfLength(16); runTest( ExitCodes.OK, true, diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java index 9cd6ec630ad..6270111a097 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java @@ -179,7 +179,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { assertThat(loggerConfigs, hasKey("bar")); assertThat(loggerConfigs.get("bar").getLevel(), equalTo(barLevel)); - assertThat(ctx.getLogger(randomAsciiOfLength(16)).getLevel(), equalTo(rootLevel)); + 
assertThat(ctx.getLogger(randomAlphaOfLength(16)).getLevel(), equalTo(rootLevel)); } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index f02ce6031c3..6246f3e1dd4 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -37,16 +37,12 @@ import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.RegexMatcher; -import javax.management.MBeanServerPermission; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.file.Files; import java.nio.file.Path; -import java.security.AccessControlException; -import java.security.Permission; import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -126,7 +122,7 @@ public class EvilLoggerTests extends ESTestCase { public void testPrefixLogger() throws IOException, IllegalAccessException, UserException { setupLogging("prefix"); - final String prefix = randomBoolean() ? null : randomAsciiOfLength(16); + final String prefix = randomBoolean() ? null : randomAlphaOfLength(16); final Logger logger = Loggers.getLogger("prefix", prefix); logger.info("test"); logger.info("{}", "test"); @@ -156,9 +152,9 @@ public class EvilLoggerTests extends ESTestCase { } public void testProperties() throws IOException, UserException { - final Settings.Builder builder = Settings.builder().put("cluster.name", randomAsciiOfLength(16)); + final Settings.Builder builder = Settings.builder().put("cluster.name", randomAlphaOfLength(16)); if (randomBoolean()) { - builder.put("node.name", randomAsciiOfLength(16)); + builder.put("node.name", randomAlphaOfLength(16)); } final Settings settings = builder.build(); setupLogging("minimal", settings); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index 10918eea189..7ebc2f0709b 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -25,6 +25,7 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.Arrays; +import java.util.Locale; import java.util.stream.Collectors; import org.apache.lucene.util.LuceneTestCase; @@ -72,25 +73,39 @@ public class ListPluginsCommandTests extends ESTestCase { return terminal; } - static String buildMultiline(String... args){ - return Arrays.asList(args).stream().collect(Collectors.joining("\n", "", "\n")); + private static String buildMultiline(String... 
args){ + return Arrays.stream(args).collect(Collectors.joining("\n", "", "\n")); } - static void buildFakePlugin(Environment env, String description, String name, String classname) throws IOException { - PluginTestUtil.writeProperties(env.pluginsFile().resolve(name), + private static void buildFakePlugin( + final Environment env, + final String description, + final String name, + final String classname) throws IOException { + buildFakePlugin(env, description, name, classname, false); + } + + private static void buildFakePlugin( + final Environment env, + final String description, + final String name, + final String classname, + final boolean hasNativeController) throws IOException { + PluginTestUtil.writeProperties( + env.pluginsFile().resolve(name), "description", description, "name", name, "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "classname", classname); + "classname", classname, + "has.native.controller", Boolean.toString(hasNativeController)); } - public void testPluginsDirMissing() throws Exception { Files.delete(env.pluginsFile()); IOException e = expectThrows(IOException.class, () -> listPlugins(home)); - assertEquals(e.getMessage(), "Plugins directory missing: " + env.pluginsFile()); + assertEquals("Plugins directory missing: " + env.pluginsFile(), e.getMessage()); } public void testNoPlugins() throws Exception { @@ -101,22 +116,48 @@ public class ListPluginsCommandTests extends ESTestCase { public void testOnePlugin() throws Exception { buildFakePlugin(env, "fake desc", "fake", "org.fake"); MockTerminal terminal = listPlugins(home); - assertEquals(terminal.getOutput(), buildMultiline("fake")); + assertEquals(buildMultiline("fake"), terminal.getOutput()); } public void testTwoPlugins() throws Exception { buildFakePlugin(env, "fake desc", "fake1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake2", "org.fake"); MockTerminal terminal = listPlugins(home); - assertEquals(terminal.getOutput(), buildMultiline("fake1", "fake2")); + assertEquals(buildMultiline("fake1", "fake2"), terminal.getOutput()); } public void testPluginWithVerbose() throws Exception { buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake"); String[] params = { "-v" }; MockTerminal terminal = listPlugins(home, params); - assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin", - "- Plugin information:", "Name: fake_plugin", "Description: fake desc", "Version: 1.0", " * Classname: org.fake")); + assertEquals( + buildMultiline( + "Plugins directory: " + env.pluginsFile(), + "fake_plugin", + "- Plugin information:", + "Name: fake_plugin", + "Description: fake desc", + "Version: 1.0", + "Native Controller: false", + " * Classname: org.fake"), + terminal.getOutput()); + } + + public void testPluginWithNativeController() throws Exception { + buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake", true); + String[] params = { "-v" }; + MockTerminal terminal = listPlugins(home, params); + assertEquals( + buildMultiline( + "Plugins directory: " + env.pluginsFile(), + "fake_plugin1", + "- Plugin information:", + "Name: fake_plugin1", + "Description: fake desc 1", + "Version: 1.0", + "Native Controller: true", + " * Classname: org.fake"), + terminal.getOutput()); } public void testPluginWithVerboseMultiplePlugins() throws Exception { @@ -124,10 +165,24 @@ public class ListPluginsCommandTests extends ESTestCase { buildFakePlugin(env, "fake desc 2", "fake_plugin2", 
"org.fake2"); String[] params = { "-v" }; MockTerminal terminal = listPlugins(home, params); - assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), - "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", "Description: fake desc 1", "Version: 1.0", - " * Classname: org.fake", "fake_plugin2", "- Plugin information:", "Name: fake_plugin2", - "Description: fake desc 2", "Version: 1.0", " * Classname: org.fake2")); + assertEquals( + buildMultiline( + "Plugins directory: " + env.pluginsFile(), + "fake_plugin1", + "- Plugin information:", + "Name: fake_plugin1", + "Description: fake desc 1", + "Version: 1.0", + "Native Controller: false", + " * Classname: org.fake", + "fake_plugin2", + "- Plugin information:", + "Name: fake_plugin2", + "Description: fake desc 2", + "Version: 1.0", + "Native Controller: false", + " * Classname: org.fake2"), + terminal.getOutput()); } public void testPluginWithoutVerboseMultiplePlugins() throws Exception { @@ -135,21 +190,26 @@ public class ListPluginsCommandTests extends ESTestCase { buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); MockTerminal terminal = listPlugins(home, new String[0]); String output = terminal.getOutput(); - assertEquals(output, buildMultiline("fake_plugin1", "fake_plugin2")); + assertEquals(buildMultiline("fake_plugin1", "fake_plugin2"), output); } public void testPluginWithoutDescriptorFile() throws Exception{ - Files.createDirectories(env.pluginsFile().resolve("fake1")); + final Path pluginDir = env.pluginsFile().resolve("fake1"); + Files.createDirectories(pluginDir); NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(home)); - assertEquals(e.getFile(), env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString()); + assertEquals(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString(), e.getFile()); } public void testPluginWithWrongDescriptorFile() throws Exception{ - PluginTestUtil.writeProperties(env.pluginsFile().resolve("fake1"), - "description", "fake desc"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(home)); - assertEquals(e.getMessage(), "Property [name] is missing in [" + - env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString() + "]"); + final Path pluginDir = env.pluginsFile().resolve("fake1"); + PluginTestUtil.writeProperties(pluginDir, "description", "fake desc"); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> listPlugins(home)); + final Path descriptorPath = pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES); + assertEquals( + "property [name] is missing in [" + descriptorPath.toString() + "]", + e.getMessage()); } public void testExistingIncompatiblePlugin() throws Exception { @@ -163,11 +223,14 @@ public class ListPluginsCommandTests extends ESTestCase { buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); MockTerminal terminal = listPlugins(home); - assertEquals("fake_plugin1\n" + - "WARNING: Plugin [fake_plugin1] is incompatible with Elasticsearch [" + - Version.CURRENT.toString() + "]. 
Was designed for version [1.0.0]\n" + - "fake_plugin2\n", - terminal.getOutput()); + final String message = String.format(Locale.ROOT, + "plugin [%s] is incompatible with version [%s]; was designed for version [%s]", + "fake_plugin1", + Version.CURRENT.toString(), + "1.0.0"); + assertEquals( + "fake_plugin1\n" + "WARNING: " + message + "\n" + "fake_plugin2\n", + terminal.getOutput()); String[] params = {"-s"}; terminal = listPlugins(home, params); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java index 466f7d05cd1..77ecd12f786 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginSecurityTests.java @@ -19,60 +19,132 @@ package org.elasticsearch.plugins; +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.Version; +import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.Terminal; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.nio.file.Path; import java.security.Permission; import java.security.PermissionCollection; import java.security.Permissions; import java.util.Collections; import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.not; /** Tests plugin manager security check */ public class PluginSecurityTests extends ESTestCase { + private final Supplier<Path> tmpFile = LuceneTestCase::createTempDir; + + public void testHasNativeController() throws IOException { + assumeTrue( + "test cannot run with security manager enabled", + System.getSecurityManager() == null); + final PluginInfo info = + new PluginInfo("fake", "fake", Version.CURRENT.toString(), "Fake", true); + final MockTerminal terminal = new MockTerminal(); + terminal.addTextInput("y"); + terminal.addTextInput("y"); + final Path policyFile = this.getDataPath("security/simple-plugin-security.policy"); + PluginSecurity.readPolicy(info, policyFile, terminal, tmpFile, false); + final String output = terminal.getOutput(); + assertThat(output, containsString("plugin forks a native controller")); + } + + public void testDeclineNativeController() throws IOException { + assumeTrue( + "test cannot run with security manager enabled", + System.getSecurityManager() == null); + final PluginInfo info = + new PluginInfo("fake", "fake", Version.CURRENT.toString(), "Fake", true); + final MockTerminal terminal = new MockTerminal(); + terminal.addTextInput("y"); + terminal.addTextInput("n"); + final Path policyFile = this.getDataPath("security/simple-plugin-security.policy"); + RuntimeException e = expectThrows( + RuntimeException.class, + () -> PluginSecurity.readPolicy(info, policyFile, terminal, tmpFile, false)); + assertThat(e, hasToString(containsString("installation aborted by user"))); + } + + public void testDoesNotHaveNativeController() throws IOException { + assumeTrue( + "test cannot run with security manager enabled", + System.getSecurityManager() == null); + final PluginInfo info = + new PluginInfo("fake", "fake", Version.CURRENT.toString(), "Fake", false); + final MockTerminal terminal = new MockTerminal(); + terminal.addTextInput("y"); + final Path policyFile = this.getDataPath("security/simple-plugin-security.policy"); + PluginSecurity.readPolicy(info, policyFile, terminal, tmpFile, false); + final 
String output = terminal.getOutput(); + assertThat(output, not(containsString("plugin forks a native controller"))); + } + /** Test that we can parse the set of permissions correctly for a simple policy */ public void testParsePermissions() throws Exception { - assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null); + assumeTrue( + "test cannot run with security manager enabled", + System.getSecurityManager() == null); Path scratch = createTempDir(); Path testFile = this.getDataPath("security/simple-plugin-security.policy"); Permissions expected = new Permissions(); expected.add(new RuntimePermission("queuePrintJob")); - PermissionCollection actual = PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch); + PermissionCollection actual = + PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch); assertEquals(expected, actual); } /** Test that we can parse the set of permissions correctly for a complex policy */ public void testParseTwoPermissions() throws Exception { - assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null); + assumeTrue( + "test cannot run with security manager enabled", + System.getSecurityManager() == null); Path scratch = createTempDir(); Path testFile = this.getDataPath("security/complex-plugin-security.policy"); Permissions expected = new Permissions(); expected.add(new RuntimePermission("getClassLoader")); expected.add(new RuntimePermission("closeClassLoader")); - PermissionCollection actual = PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch); + PermissionCollection actual = + PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch); assertEquals(expected, actual); } /** Test that we can format some simple permissions properly */ public void testFormatSimplePermission() throws Exception { - assertEquals("java.lang.RuntimePermission queuePrintJob", PluginSecurity.formatPermission(new RuntimePermission("queuePrintJob"))); + assertEquals( + "java.lang.RuntimePermission queuePrintJob", + PluginSecurity.formatPermission(new RuntimePermission("queuePrintJob"))); } /** Test that we can format an unresolved permission properly */ public void testFormatUnresolvedPermission() throws Exception { - assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null); + assumeTrue( + "test cannot run with security manager enabled", + System.getSecurityManager() == null); Path scratch = createTempDir(); Path testFile = this.getDataPath("security/unresolved-plugin-security.policy"); - PermissionCollection actual = PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch); + PermissionCollection actual = + PluginSecurity.parsePermissions(Terminal.DEFAULT, testFile, scratch); List<Permission> permissions = Collections.list(actual.elements()); assertEquals(1, permissions.size()); - assertEquals("org.fake.FakePermission fakeName", PluginSecurity.formatPermission(permissions.get(0))); + assertEquals( + "org.fake.FakePermission fakeName", + PluginSecurity.formatPermission(permissions.get(0))); } /** no guaranteed equals on these classes, we assert they contain the same set */ private void assertEquals(PermissionCollection expected, PermissionCollection actual) { - assertEquals(asSet(Collections.list(expected.elements())), asSet(Collections.list(actual.elements()))); + assertEquals( + asSet(Collections.list(expected.elements())), + asSet(Collections.list(actual.elements()))); } + } diff --git 
a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 743d2408b9d..f81bd3b2d47 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -21,8 +21,11 @@ package org.elasticsearch.bootstrap; import org.apache.lucene.util.Constants; import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.PluginTestUtil; +import org.elasticsearch.plugins.Platforms; import java.io.BufferedReader; import java.io.IOException; @@ -36,11 +39,15 @@ import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + /** * Create a simple "daemon controller", put it in the right place and check that it runs. * - * Extends LuceneTestCase rather than ESTestCase as ESTestCase installs a system call filter, and that prevents the Spawner class doing its - * job. Also needs to run in a separate JVM to other tests that extend ESTestCase for the same reason. + * Extends LuceneTestCase rather than ESTestCase as ESTestCase installs a system call filter, and + * that prevents the Spawner class from doing its job. Also needs to run in a separate JVM to other + * tests that extend ESTestCase for the same reason. */ public class SpawnerNoBootstrapTests extends LuceneTestCase { @@ -64,10 +71,19 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { // This plugin will NOT have a controller daemon Path plugin = environment.pluginsFile().resolve("a_plugin"); Files.createDirectories(plugin); + PluginTestUtil.writeProperties( + plugin, + "description", "a_plugin", + "version", Version.CURRENT.toString(), + "elasticsearch.version", Version.CURRENT.toString(), + "name", "a_plugin", + "java.version", "1.8", + "classname", "APlugin", + "has.native.controller", "false"); try (Spawner spawner = new Spawner()) { spawner.spawnNativePluginControllers(environment); - assertTrue(spawner.getProcesses().isEmpty()); + assertThat(spawner.getProcesses(), hasSize(0)); } } @@ -75,10 +91,10 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { * Two plugins - one with a controller daemon and one without. */ public void testControllerSpawn() throws IOException, InterruptedException { - // On Windows you cannot directly run a batch file - you have to run cmd.exe with the batch file - // as an argument and that's out of the remit of the controller daemon process spawner. If - // you need to build on Windows, just don't run this test. The process spawner itself will work - // with native processes. + /* + * On Windows you can not directly run a batch file - you have to run cmd.exe with the batch + * file as an argument and that's out of the remit of the controller daemon process spawner. 
+ */ assumeFalse("This test does not work on Windows", Constants.WINDOWS); Path esHome = createTempDir().resolve("esHome"); @@ -88,32 +104,90 @@ Environment environment = new Environment(settings); - // This plugin WILL have a controller daemon + // this plugin will have a controller daemon Path plugin = environment.pluginsFile().resolve("test_plugin"); Files.createDirectories(plugin); - Path controllerProgram = Spawner.makeSpawnPath(plugin); + PluginTestUtil.writeProperties( + plugin, + "description", "test_plugin", + "version", Version.CURRENT.toString(), + "elasticsearch.version", Version.CURRENT.toString(), + "name", "test_plugin", + "java.version", "1.8", + "classname", "TestPlugin", + "has.native.controller", "true"); + Path controllerProgram = Platforms.nativeControllerPath(plugin); createControllerProgram(controllerProgram); - // This plugin will NOT have a controller daemon + // this plugin will not have a controller daemon Path otherPlugin = environment.pluginsFile().resolve("other_plugin"); Files.createDirectories(otherPlugin); + PluginTestUtil.writeProperties( + otherPlugin, + "description", "other_plugin", + "version", Version.CURRENT.toString(), + "elasticsearch.version", Version.CURRENT.toString(), + "name", "other_plugin", + "java.version", "1.8", + "classname", "OtherPlugin", + "has.native.controller", "false"); Spawner spawner = new Spawner(); spawner.spawnNativePluginControllers(environment); List<Process> processes = spawner.getProcesses(); - // 1 because there should only be a reference in the list for the plugin that had the controller daemon, not the other plugin - assertEquals(1, processes.size()); + /* + * As there should only be a reference in the list for the plugin that had the controller + * daemon, we expect one here. + */ + assertThat(processes, hasSize(1)); Process process = processes.get(0); - try (BufferedReader stdoutReader = new BufferedReader(new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) { + final InputStreamReader in = + new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8); + try (BufferedReader stdoutReader = new BufferedReader(in)) { String line = stdoutReader.readLine(); assertEquals("I am alive", line); spawner.close(); - // Fail if the process doesn't die within 1 second - usually it will be even quicker but it depends on OS scheduling + /* + * Fail if the process does not die within one second; usually it will be even quicker + * but it depends on OS scheduling. 
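Note: the properties written by these tests mirror what a real plugin ships as plugin-descriptor.properties; a minimal descriptor for a plugin that forks a native controller might look like the sketch below (every value is hypothetical, only the property names come from this change).

    description=my plugin
    version=1.0
    name=my_plugin
    classname=org.example.MyPlugin
    java.version=1.8
    elasticsearch.version=6.0.0-alpha1
    has.native.controller=true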
+ */ assertTrue(process.waitFor(1, TimeUnit.SECONDS)); } } + public void testControllerSpawnWithIncorrectDescriptor() throws IOException { + // this plugin will have a controller daemon + Path esHome = createTempDir().resolve("esHome"); + Settings.Builder settingsBuilder = Settings.builder(); + settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString()); + Settings settings = settingsBuilder.build(); + + Environment environment = new Environment(settings); + + Path plugin = environment.pluginsFile().resolve("test_plugin"); + Files.createDirectories(plugin); + PluginTestUtil.writeProperties( + plugin, + "description", "test_plugin", + "version", Version.CURRENT.toString(), + "elasticsearch.version", Version.CURRENT.toString(), + "name", "test_plugin", + "java.version", "1.8", + "classname", "TestPlugin", + "has.native.controller", "false"); + Path controllerProgram = Platforms.nativeControllerPath(plugin); + createControllerProgram(controllerProgram); + + Spawner spawner = new Spawner(); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> spawner.spawnNativePluginControllers(environment)); + assertThat( + e.getMessage(), + equalTo("plugin [test_plugin] does not have permission to fork native controller")); + } + private void createControllerProgram(Path outputFile) throws IOException { Path outputDir = outputFile.getParent(); Files.createDirectories(outputDir); @@ -128,4 +202,5 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { perms.add(PosixFilePermission.OTHERS_EXECUTE); Files.setPosixFilePermissions(outputFile, perms); } + } diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index f4cde58d755..9b8259eecb5 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -27,7 +27,7 @@ task oldClusterTest(type: RestIntegTestTask) { oldClusterTestCluster { distribution = 'zip' - bwcVersion = '5.4.0-SNAPSHOT' // TODO: either randomize, or make this settable with sysprop + bwcVersion = project.bwcVersion // TODO: either randomize, or make this settable with sysprop numBwcNodes = 2 numNodes = 2 clusterName = 'rolling-upgrade' diff --git a/qa/smoke-test-client/build.gradle b/qa/smoke-test-client/build.gradle index 888d9325242..e4d197e7e6a 100644 --- a/qa/smoke-test-client/build.gradle +++ b/qa/smoke-test-client/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.test.RestIntegTestTask + /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -25,3 +27,16 @@ apply plugin: 'elasticsearch.rest-test' dependencies { testCompile project(path: ':client:transport', configuration: 'runtime') // randomly swapped in as a transport } + +task singleNodeIntegTest(type: RestIntegTestTask) { + mustRunAfter(precommit) +} + +singleNodeIntegTestCluster { + numNodes = 1 + setting 'discovery.type', 'single-node' +} + +integTest.dependsOn(singleNodeIntegTestRunner, 'singleNodeIntegTestCluster#stop') + +check.dependsOn(integTest) diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/SmokeTestClientIT.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/SmokeTestClientIT.java index 6380ed90e18..2e86ce82221 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/SmokeTestClientIT.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/SmokeTestClientIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.smoketest; -import org.apache.lucene.util.Constants; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; @@ -29,35 +28,29 @@ import static org.hamcrest.Matchers.greaterThan; public class SmokeTestClientIT extends ESSmokeClientTestCase { - // needed to avoid the test suite from failing for having no tests - // TODO: remove when Netty 4.1.5 is upgraded to Netty 4.1.6 including https://github.com/netty/netty/pull/5778 - public void testSoThatTestsDoNotFail() { - - } - /** * Check that we are connected to a cluster named "elasticsearch". */ public void testSimpleClient() { - // TODO: remove when Netty 4.1.5 is upgraded to Netty 4.1.6 including https://github.com/netty/netty/pull/5778 - assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9); - Client client = getClient(); + final Client client = getClient(); // START SNIPPET: java-doc-admin-cluster-health - ClusterHealthResponse health = client.admin().cluster().prepareHealth().setWaitForYellowStatus().get(); - String clusterName = health.getClusterName(); - int numberOfNodes = health.getNumberOfNodes(); + final ClusterHealthResponse health = + client.admin().cluster().prepareHealth().setWaitForYellowStatus().get(); + final String clusterName = health.getClusterName(); + final int numberOfNodes = health.getNumberOfNodes(); // END SNIPPET: java-doc-admin-cluster-health - assertThat("cluster [" + clusterName + "] should have at least 1 node", numberOfNodes, greaterThan(0)); + assertThat( + "cluster [" + clusterName + "] should have at least 1 node", + numberOfNodes, + greaterThan(0)); } /** * Create an index and index some docs */ public void testPutDocument() { - // TODO: remove when Netty 4.1.5 is upgraded to Netty 4.1.6 including https://github.com/netty/netty/pull/5778 - assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9); - Client client = getClient(); + final Client client = getClient(); // START SNIPPET: java-doc-index-doc-simple client.prepareIndex(index, "doc", "1") // Index, Type, Id @@ -71,7 +64,7 @@ public class SmokeTestClientIT extends ESSmokeClientTestCase { // END SNIPPET: java-doc-admin-indices-refresh // START SNIPPET: java-doc-search-simple - SearchResponse searchResponse = client.prepareSearch(index).get(); + final SearchResponse searchResponse = client.prepareSearch(index).get(); assertThat(searchResponse.getHits().getTotalHits(), is(1L)); // END SNIPPET: java-doc-search-simple } diff --git 
a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index 397838862c3..28b5825513f 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -76,9 +76,9 @@ import static org.hamcrest.Matchers.is; public class ContextAndHeaderTransportIT extends HttpSmokeTestCase { private static final List requests = new CopyOnWriteArrayList<>(); private static final String CUSTOM_HEADER = "SomeCustomHeader"; - private String randomHeaderValue = randomAsciiOfLength(20); - private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); - private String lookupIndex = "lookup-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); + private String randomHeaderValue = randomAlphaOfLength(20); + private String queryIndex = "query-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + private String lookupIndex = "lookup-" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); @Override protected Settings nodeSettings(int nodeOrdinal) { diff --git a/qa/vagrant/versions b/qa/vagrant/versions index 40ad2117f7d..6269784d341 100644 --- a/qa/vagrant/versions +++ b/qa/vagrant/versions @@ -6,3 +6,4 @@ 5.2.0 5.2.1 5.2.2 +5.3.0 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json new file mode 100644 index 00000000000..d993dc0545b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json @@ -0,0 +1,43 @@ +{ + "field_caps": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-field-caps.html", + "methods": ["GET", "POST"], + "url": { + "path": "/_field_caps", + "paths": [ + "/_field_caps", + "/{index}/_field_caps" + ], + "parts": { + "index": { + "type" : "list", + "description" : "A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices" + } + }, + "params": { + "fields": { + "type" : "list", + "description" : "A comma-separated list of field names" + }, + "ignore_unavailable": { + "type" : "boolean", + "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" + }, + "allow_no_indices": { + "type" : "boolean", + "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" + }, + "expand_wildcards": { + "type" : "enum", + "options" : ["open","closed","none","all"], + "default" : "open", + "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." 
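Note: a request against the new API would look something like the sketch below (the index and field names are borrowed from the YAML tests added later in this change, purely as an illustration).

    GET /test1,test2,test3/_field_caps?fields=text,keyword,number,geo

The response reports, per field and per mapped type, whether the field is searchable and aggregatable, which is what the `fields.<field>.<type>.*` assertions in those tests check.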
+ } + } + }, + "body": { + "description": "Field json objects containing an array of field names", + "required": false + } + } +} \ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml index 579408dbc0b..8d72d40b10b 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml @@ -277,7 +277,8 @@ h: [index, docs] s: [docs] - - match: # don't use the store here it's cached and might be stale +# don't use the store here it's cached and might be stale + - match: $body: | /^ foo \s+ 0\n bar \s+ 1\n diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml index 7c7445fc67d..7dd43e33bec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yaml @@ -61,7 +61,8 @@ cat.templates: {} - match: - $body: / + $body: > + / (^|\n)test \s+ \[test-\*\] \s+ 0 \s+ @@ -70,7 +71,8 @@ / - match: - $body: / + $body: > + / (^|\n)test_2 \s+ \[test-2\*\] \s+ 1 \s+ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml index 5031c977ccd..083466f94a5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.put_settings/10_basic.yaml @@ -64,8 +64,9 @@ --- "Test get a default settings": +# this can't be bumped to 5.0.2 until snapshots are published - skip: - version: " - 5.99.99" # this can't be bumped to 5.0.2 until snapshots are published + version: " - 5.99.99" reason: Fetching default group setting was buggy until 5.0.3 - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yaml index e4cbc443055..e220d988161 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yaml @@ -26,13 +26,13 @@ reason: validation logic only fixed from 5.1.2 onwards - do: + catch: request create: index: test type: test id: 3 body: { foo: bar } version: 5 - ignore: 400 - match: { status: 400 } - match: { error.type: action_request_validation_exception } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yaml index 96503aae8ae..e29690fe8d0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/35_external_version.yaml @@ -6,6 +6,7 @@ reason: validation logic only fixed from 5.1.2 onwards - do: + catch: request create: index: test type: test @@ -13,13 +14,13 @@ body: { foo: bar } version_type: external version: 0 - ignore: 400 - match: { status: 400 } - match: { error.type: action_request_validation_exception } - match: { error.reason: "Validation Failed: 1: create operations only support internal versioning. 
use index instead;" } - do: + catch: request create: index: test type: test @@ -27,7 +28,6 @@ body: { foo: bar } version_type: external version: 5 - ignore: 400 - match: { status: 400 } - match: { error.type: action_request_validation_exception } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml index 4ea921a3fa0..b5a9212d36b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml @@ -4,9 +4,9 @@ setup: index: test_1 body: aliases: - alias_1: { + alias_1: "filter" : { "term" : { "foo" : "bar"} } - } + - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yaml new file mode 100644 index 00000000000..edda7b6dbf3 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yaml @@ -0,0 +1,167 @@ +--- +setup: + - do: + indices.create: + index: test1 + body: + mappings: + t: + properties: + text: + type: text + keyword: + type: keyword + number: + type: double + geo: + type: geo_point + object: + type: object + properties: + nested1 : + type : text + index: false + nested2: + type: float + doc_values: false + - do: + indices.create: + index: test2 + body: + mappings: + t: + properties: + text: + type: text + keyword: + type: keyword + number: + type: double + geo: + type: geo_point + object: + type: object + properties: + nested1 : + type : text + index: true + nested2: + type: float + doc_values: true + - do: + indices.create: + index: test3 + body: + mappings: + t: + properties: + text: + type: text + keyword: + type: keyword + number: + type: long + geo: + type: keyword + object: + type: object + properties: + nested1 : + type : long + index: false + nested2: + type: keyword + doc_values: false + +--- +"Get simple field caps": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + + - do: + field_caps: + index: 'test1,test2,test3' + fields: [text, keyword, number, geo] + + - match: {fields.text.text.searchable: true} + - match: {fields.text.text.aggregatable: false} + - is_false: fields.text.text.indices + - is_false: fields.text.text.non_searchable_indices + - is_false: fields.text.text.non_aggregatable_indices + - match: {fields.keyword.keyword.searchable: true} + - match: {fields.keyword.keyword.aggregatable: true} + - is_false: fields.text.keyword.indices + - is_false: fields.text.keyword.non_searchable_indices + - is_false: fields.text.keyword.non_aggregatable_indices + - match: {fields.number.double.searchable: true} + - match: {fields.number.double.aggregatable: true} + - match: {fields.number.double.indices: ["test1", "test2"]} + - is_false: fields.number.double.non_searchable_indices + - is_false: fields.number.double.non_aggregatable_indices + - match: {fields.number.long.searchable: true} + - match: {fields.number.long.aggregatable: true} + - match: {fields.number.long.indices: ["test3"]} + - is_false: fields.number.long.non_searchable_indices + - is_false: fields.number.long.non_aggregatable_indices + - match: {fields.geo.geo_point.searchable: true} + - match: {fields.geo.geo_point.aggregatable: true} + - match: {fields.geo.geo_point.indices: ["test1", "test2"]} + - is_false: fields.geo.geo_point.non_searchable_indices + - is_false: 
fields.geo.geo_point.non_aggregatable_indices + - match: {fields.geo.keyword.searchable: true} + - match: {fields.geo.keyword.aggregatable: true} + - match: {fields.geo.keyword.indices: ["test3"]} + - is_false: fields.geo.keyword.non_searchable_indices + - is_false: fields.geo.keyword.non_aggregatable_indices +--- +"Get nested field caps": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + + - do: + field_caps: + index: 'test1,test2,test3' + fields: object* + + - match: {fields.object\.nested1.long.searchable: false} + - match: {fields.object\.nested1.long.aggregatable: true} + - match: {fields.object\.nested1.long.indices: ["test3"]} + - is_false: fields.object\.nested1.long.non_searchable_indices + - is_false: fields.object\.nested1.long.non_aggregatable_indices + - match: {fields.object\.nested1.text.searchable: false} + - match: {fields.object\.nested1.text.aggregatable: false} + - match: {fields.object\.nested1.text.indices: ["test1", "test2"]} + - match: {fields.object\.nested1.text.non_searchable_indices: ["test1"]} + - is_false: fields.object\.nested1.text.non_aggregatable_indices + - match: {fields.object\.nested2.float.searchable: true} + - match: {fields.object\.nested2.float.aggregatable: false} + - match: {fields.object\.nested2.float.indices: ["test1", "test2"]} + - match: {fields.object\.nested2.float.non_aggregatable_indices: ["test1"]} + - is_false: fields.object\.nested2.float.non_searchable_indices + - match: {fields.object\.nested2.keyword.searchable: true} + - match: {fields.object\.nested2.keyword.aggregatable: false} + - match: {fields.object\.nested2.keyword.indices: ["test3"]} + - is_false: fields.object\.nested2.keyword.non_aggregatable_indices + - is_false: fields.object\.nested2.keyword.non_searchable_indices +--- +"Get prefix field caps": + - skip: + version: " - 5.99.99" + reason: this uses a new API that has been added in 6.0 + + - do: + field_caps: + index: _all + fields: "n*" + - match: {fields.number.double.searchable: true} + - match: {fields.number.double.aggregatable: true} + - match: {fields.number.double.indices: ["test1", "test2"]} + - is_false: fields.number.double.non_searchable_indices + - is_false: fields.number.double.non_aggregatable_indices + - match: {fields.number.long.searchable: true} + - match: {fields.number.long.aggregatable: true} + - match: {fields.number.long.indices: ["test3"]} + - is_false: fields.number.long.non_searchable_indices + - is_false: fields.number.long.non_aggregatable_indices diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml index 68bb11c42ba..d8db152e979 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.clear_cache/10_basic.yaml @@ -6,8 +6,8 @@ --- "clear_cache with request set to false": - skip: - version: " - 5.4.99" - reason: this name was added in 5.4 - temporarilly skipping 5.4 until snapshot is finished + version: " - 5.3.99" + reason: this name was added in 5.4 - do: indices.clear_cache: @@ -16,8 +16,8 @@ --- "clear_cache with request_cache set to false": - skip: - version: " - 5.4.99" - reason: request_cache was deprecated in 5.4.0 - temporarilly skipping 5.4 until snapshot is finished + version: " - 5.3.99" + reason: request_cache was deprecated in 5.4.0 features: "warnings" - do: diff --git 
diff --git
a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yaml index d3f4134c0f6..b7724e06283 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yaml @@ -106,9 +106,9 @@ setup: version: " - 5.0.99" reason: strict stats handling does not exist in 5.0 - do: + catch: request indices.stats: metric: [ fieldata ] - ignore: 400 - match: { status: 400 } - match: { error.type: illegal_argument_exception } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml index 637ebd4253e..29b1e664d61 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml @@ -6,9 +6,9 @@ setup: settings: number_of_replicas: 0 aliases: - alias_1: { + alias_1: "filter" : { "match_all" : {} } - } + --- "Validate query api": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml index fd0100b0d63..06234642250 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml @@ -21,7 +21,6 @@ - do: mget: body: - index: test_2 docs: - { _index: test_1, _type: test, _id: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/10_basic.yaml index 039a24284ea..62664319d8a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/10_basic.yaml @@ -27,9 +27,9 @@ version: " - 5.0.99" reason: strict stats handling does not exist in 5.0 - do: + catch: request nodes.stats: metric: [ transprot ] - ignore: 400 - match: { status: 400 } - match: { error.type: illegal_argument_exception } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml index 52379390d47..c9ba94cf615 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/20_terms.yaml @@ -453,9 +453,9 @@ setup: - do: search: - body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "str", "include" : {"partition":0, "num_partitions":2 } } } } } + body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "str", "include" : {"partition": 0, "num_partitions": 2 } } } } } - - match: { hits.total: 3 } + - match: { hits.total : 3 } - length: { aggregations.str_terms.buckets: 1 } @@ -467,7 +467,7 @@ setup: - do: search: - body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "str", "include" : {"partition":1, "num_partitions":2 } } } } } + body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "str", "include" : {"partition": 1, "num_partitions": 2 } } } } } - match: { hits.total: 3 } @@ -512,7 +512,7 @@ setup: - do: search: - body: { "size" : 0, "aggs" : {
"str_terms" : { "terms" : { "field" : "integer", "include" : {"partition":0, "num_partitions":2 } } } } } + body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "integer", "include" : {"partition": 0, "num_partitions": 2 } } } } } - match: { hits.total: 3 } @@ -524,7 +524,7 @@ setup: - do: search: - body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "integer", "include" : {"partition":1, "num_partitions":2 } } } } } + body: { "size" : 0, "aggs" : { "str_terms" : { "terms" : { "field" : "integer", "include" : {"partition": 1, "num_partitions": 2 } } } } } - match: { hits.total: 3 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml index f94ba86d914..d48d5088756 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml @@ -57,7 +57,10 @@ - length: { shards: 1 } - match: { shards.0.0.index: test_index } - - match: { indices.test_index: {aliases: [test_alias_filter_1], filter: { term : { field: { value: value1, boost: 1.0}}}}} + - match: { indices.test_index.aliases: [test_alias_filter_1] } + - match: { indices.test_index.filter.term.field.value: value1 } + - lte: { indices.test_index.filter.term.field.boost: 1.0 } + - gte: { indices.test_index.filter.term.field.boost: 1.0 } - do: search_shards: @@ -65,4 +68,14 @@ - length: { shards: 1 } - match: { shards.0.0.index: test_index } - - match: { indices.test_index: {aliases: [test_alias_filter_1, test_alias_filter_2], filter: { bool: { should : [{ term : { field: { value: value1, boost: 1.0}}}, { term : { field: { value: value2, boost: 1.0}}}], adjust_pure_negative: true, boost: 1.0, disable_coord: false }}}} + - match: { indices.test_index.aliases: [test_alias_filter_1, test_alias_filter_2]} + - match: { indices.test_index.filter.bool.should.0.term.field.value: value1 } + - lte: { indices.test_index.filter.bool.should.0.term.field.boost: 1.0 } + - gte: { indices.test_index.filter.bool.should.0.term.field.boost: 1.0 } + - match: { indices.test_index.filter.bool.should.1.term.field.value: value2} + - lte: { indices.test_index.filter.bool.should.1.term.field.boost: 1.0 } + - gte: { indices.test_index.filter.bool.should.1.term.field.boost: 1.0 } + - match: { indices.test_index.filter.bool.adjust_pure_negative: true} + - match: { indices.test_index.filter.bool.disable_coord: false} + - lte: { indices.test_index.filter.bool.boost: 1.0 } + - gte: { indices.test_index.filter.bool.boost: 1.0 } diff --git a/settings.gradle b/settings.gradle index b68c2278024..c3c153366be 100644 --- a/settings.gradle +++ b/settings.gradle @@ -15,6 +15,7 @@ List projects = [ 'client:benchmark', 'benchmarks', 'distribution:integ-test-zip', + 'distribution:bwc-zip', 'distribution:zip', 'distribution:tar', 'distribution:deb', diff --git a/test/framework/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractBulkByScrollRequestTestCase.java b/test/framework/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractBulkByScrollRequestTestCase.java index 8a12a32ad0b..c4ac9587e82 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractBulkByScrollRequestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/action/bulk/byscroll/AbstractBulkByScrollRequestTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.bulk.byscroll; -import 
org.elasticsearch.action.bulk.byscroll.AbstractBulkByScrollRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.tasks.TaskId; @@ -45,7 +44,7 @@ public abstract class AbstractBulkByScrollRequestTestCase r < 0, ESTestCase::randomFloat)); original.setSize(randomBoolean() ? AbstractBulkByScrollRequest.SIZE_ALL_MATCHES : between(0, Integer.MAX_VALUE)); - TaskId slicingTask = new TaskId(randomAsciiOfLength(5), randomLong()); + TaskId slicingTask = new TaskId(randomAlphaOfLength(5), randomLong()); SearchRequest sliceRequest = new SearchRequest(); R forSliced = original.forSlice(slicingTask, sliceRequest); assertEquals(original.isAbortOnVersionConflict(), forSliced.isAbortOnVersionConflict()); diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 4ab45d8c1a6..c88b56abdd0 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -22,6 +22,7 @@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.SecureSM; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.FileSystemUtils; @@ -119,7 +120,9 @@ public class BootstrapForTesting { perms.add(new FilePermission(System.getProperty("tests.config"), "read,readlink")); } // jacoco coverage output file - if (Boolean.getBoolean("tests.coverage")) { + final boolean testsCoverage = + Booleans.parseBoolean(System.getProperty("tests.coverage", "false")); + if (testsCoverage) { Path coverageDir = PathUtils.get(System.getProperty("tests.coverage.dir")); perms.add(new FilePermission(coverageDir.resolve("jacoco.exec").toString(), "read,write")); // in case we get fancy and use the -integration goals later: diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index d0d6c02f636..a4ac6fad241 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -27,7 +27,7 @@ import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESTestCase; -import static org.elasticsearch.test.ESTestCase.randomAsciiOfLength; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; /** * A helper that allows to create shard routing instances within tests, while not requiring to expose @@ -131,7 +131,7 @@ public class TestShardRouting { RecoverySource.PeerRecoverySource.INSTANCE, RecoverySource.LocalShardsRecoverySource.INSTANCE, new RecoverySource.SnapshotRecoverySource( - new Snapshot("repo", new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())), + new Snapshot("repo", new SnapshotId(randomAlphaOfLength(8), UUIDs.randomBase64UUID())), Version.CURRENT, "some_index")); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 
01c142133a6..215551da356 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -173,7 +173,7 @@ public abstract class IndexShardTestCase extends ESTestCase { * @param listeners an optional set of listeners to add to the shard */ protected IndexShard newShard(ShardId shardId, boolean primary, IndexingOperationListener... listeners) throws IOException { - ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, randomAsciiOfLength(5), primary, + ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, randomAlphaOfLength(5), primary, ShardRoutingState.INITIALIZING, primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard(shardRouting, listeners); diff --git a/test/framework/src/main/java/org/elasticsearch/node/NodeTests.java b/test/framework/src/main/java/org/elasticsearch/node/NodeTests.java index 69291ccaba6..ae4aff917a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/NodeTests.java +++ b/test/framework/src/main/java/org/elasticsearch/node/NodeTests.java @@ -48,7 +48,7 @@ public class NodeTests extends ESTestCase { public void testNodeName() throws IOException { final Path tempDir = createTempDir(); - final String name = randomBoolean() ? randomAsciiOfLength(10) : null; + final String name = randomBoolean() ? randomAlphaOfLength(10) : null; Settings.Builder settings = Settings.builder() .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) @@ -88,7 +88,7 @@ public class NodeTests extends ESTestCase { public void testLoadPluginBootstrapChecks() throws IOException { final Path tempDir = createTempDir(); - final String name = randomBoolean() ? randomAsciiOfLength(10) : null; + final String name = randomBoolean() ? randomAlphaOfLength(10) : null; Settings.Builder settings = Settings.builder() .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) @@ -139,7 +139,7 @@ public class NodeTests extends ESTestCase { } public void testNodeAttributes() throws IOException { - String attr = randomAsciiOfLength(5); + String attr = randomAlphaOfLength(5); Settings.Builder settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "test_attr", attr); try (Node node = new MockNode(settings.build(), Collections.singleton(MockTcpTransportPlugin.class))) { final Settings nodeSettings = randomBoolean() ? 
node.settings() : node.getEnvironment().settings(); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 1bb5bb58eaf..c79f432278a 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -222,7 +222,7 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { indexRequestBuilders[i] = client().prepareIndex(name, name, Integer.toString(i)) - .setRouting(randomAsciiOfLength(randomIntBetween(1, 10))).setSource("field", "value"); + .setRouting(randomAlphaOfLength(randomIntBetween(1, 10))).setSource("field", "value"); } indexRandom(true, indexRequestBuilders); } @@ -262,6 +262,6 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase } public static String randomAsciiName() { - return randomAsciiOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + return randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); } } diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index e036676677f..c2447b4504e 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -32,6 +32,8 @@ import java.util.HashMap; import java.util.Map; import java.util.function.Function; +import static java.util.Collections.emptyMap; + /** * A mocked script engine that can be used for testing purpose. 
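The hunk just below adds a mockInlineScript factory to MockScriptEngine so tests no longer hand-assemble the four-argument Script constructor. A usage sketch; the wrapper class here is hypothetical and assumes the test framework is on the classpath:

    import org.elasticsearch.script.MockScriptEngine;
    import org.elasticsearch.script.Script;

    class MockInlineScriptUsageSketch {
        static Script example() {
            // Equivalent to new Script(ScriptType.INLINE, "mock", "doc['foo']", emptyMap())
            return MockScriptEngine.mockInlineScript("doc['foo']");
        }
    }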
* @@ -215,4 +217,9 @@ public class MockScriptEngine implements ScriptEngineService { return true; } } + + public static Script mockInlineScript(final String script) { + return new Script(ScriptType.INLINE, "mock", script, emptyMap()); + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index f49cd174b7f..2a072a1d3be 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -52,7 +52,7 @@ import java.util.function.Supplier; import static org.elasticsearch.test.ESTestCase.between; import static org.elasticsearch.test.ESTestCase.generateRandomStringArray; -import static org.elasticsearch.test.ESTestCase.randomAsciiOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomByte; import static org.elasticsearch.test.ESTestCase.randomDouble; @@ -89,13 +89,13 @@ public class RandomSearchRequestGenerator { searchRequest.types(generateRandomStringArray(10, 10, false, false)); } if (randomBoolean()) { - searchRequest.preference(randomAsciiOfLengthBetween(3, 10)); + searchRequest.preference(randomAlphaOfLengthBetween(3, 10)); } if (randomBoolean()) { searchRequest.requestCache(randomBoolean()); } if (randomBoolean()) { - searchRequest.routing(randomAsciiOfLengthBetween(3, 10)); + searchRequest.routing(randomAlphaOfLengthBetween(3, 10)); } if (randomBoolean()) { searchRequest.scroll(randomPositiveTimeValue()); @@ -152,7 +152,7 @@ public class RandomSearchRequestGenerator { int fieldsSize = randomInt(25); List fields = new ArrayList<>(fieldsSize); for (int i = 0; i < fieldsSize; i++) { - fields.add(randomAsciiOfLengthBetween(5, 50)); + fields.add(randomAlphaOfLengthBetween(5, 50)); } builder.storedFields(fields); break; @@ -164,9 +164,9 @@ public class RandomSearchRequestGenerator { int scriptFieldsSize = randomInt(25); for (int i = 0; i < scriptFieldsSize; i++) { if (randomBoolean()) { - builder.scriptField(randomAsciiOfLengthBetween(5, 50), new Script("foo"), randomBoolean()); + builder.scriptField(randomAlphaOfLengthBetween(5, 50), new Script("foo"), randomBoolean()); } else { - builder.scriptField(randomAsciiOfLengthBetween(5, 50), new Script("foo")); + builder.scriptField(randomAlphaOfLengthBetween(5, 50), new Script("foo")); } } } @@ -175,11 +175,11 @@ public class RandomSearchRequestGenerator { int branch = randomInt(5); String[] includes = new String[randomIntBetween(0, 20)]; for (int i = 0; i < includes.length; i++) { - includes[i] = randomAsciiOfLengthBetween(5, 20); + includes[i] = randomAlphaOfLengthBetween(5, 20); } String[] excludes = new String[randomIntBetween(0, 20)]; for (int i = 0; i < excludes.length; i++) { - excludes[i] = randomAsciiOfLengthBetween(5, 20); + excludes[i] = randomAlphaOfLengthBetween(5, 20); } switch (branch) { case 0: @@ -189,8 +189,8 @@ public class RandomSearchRequestGenerator { fetchSourceContext = new FetchSourceContext(true, includes, excludes); break; case 2: - fetchSourceContext = new FetchSourceContext(true, new String[]{randomAsciiOfLengthBetween(5, 20)}, - new String[]{randomAsciiOfLengthBetween(5, 20)}); + fetchSourceContext = new FetchSourceContext(true, new String[]{randomAlphaOfLengthBetween(5, 20)}, + new 
String[]{randomAlphaOfLengthBetween(5, 20)}); break; case 3: fetchSourceContext = new FetchSourceContext(true, includes, excludes); @@ -199,7 +199,7 @@ public class RandomSearchRequestGenerator { fetchSourceContext = new FetchSourceContext(true, includes, null); break; case 5: - fetchSourceContext = new FetchSourceContext(true, new String[] {randomAsciiOfLengthBetween(5, 20)}, null); + fetchSourceContext = new FetchSourceContext(true, new String[] {randomAlphaOfLengthBetween(5, 20)}, null); break; default: throw new IllegalStateException(); @@ -210,21 +210,21 @@ public class RandomSearchRequestGenerator { int size = randomIntBetween(0, 20); List statsGroups = new ArrayList<>(size); for (int i = 0; i < size; i++) { - statsGroups.add(randomAsciiOfLengthBetween(5, 20)); + statsGroups.add(randomAlphaOfLengthBetween(5, 20)); } builder.stats(statsGroups); } if (randomBoolean()) { int indexBoostSize = randomIntBetween(1, 10); for (int i = 0; i < indexBoostSize; i++) { - builder.indexBoost(randomAsciiOfLengthBetween(5, 20), randomFloat() * 10); + builder.indexBoost(randomAlphaOfLengthBetween(5, 20), randomFloat() * 10); } } if (randomBoolean()) { - builder.query(QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20))); + builder.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); } if (randomBoolean()) { - builder.postFilter(QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20))); + builder.postFilter(QueryBuilders.termQuery(randomAlphaOfLengthBetween(5, 20), randomAlphaOfLengthBetween(5, 20))); } if (randomBoolean()) { int numSorts = randomIntBetween(1, 5); @@ -232,10 +232,10 @@ public class RandomSearchRequestGenerator { int branch = randomInt(5); switch (branch) { case 0: - builder.sort(SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))); + builder.sort(SortBuilders.fieldSort(randomAlphaOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values()))); break; case 1: - builder.sort(SortBuilders.geoDistanceSort(randomAsciiOfLengthBetween(5, 20), + builder.sort(SortBuilders.geoDistanceSort(randomAlphaOfLengthBetween(5, 20), AbstractQueryTestCase.randomGeohash(1, 12)).order(randomFrom(SortOrder.values()))); break; case 2: @@ -246,10 +246,10 @@ public class RandomSearchRequestGenerator { ScriptSortBuilder.ScriptSortType.NUMBER).order(randomFrom(SortOrder.values()))); break; case 4: - builder.sort(randomAsciiOfLengthBetween(5, 20)); + builder.sort(randomAlphaOfLengthBetween(5, 20)); break; case 5: - builder.sort(randomAsciiOfLengthBetween(5, 20), randomFrom(SortOrder.values())); + builder.sort(randomAlphaOfLengthBetween(5, 20), randomFrom(SortOrder.values())); break; } } @@ -281,7 +281,7 @@ public class RandomSearchRequestGenerator { jsonBuilder.value(randomDouble()); break; case 4: - jsonBuilder.value(randomAsciiOfLengthBetween(5, 20)); + jsonBuilder.value(randomAlphaOfLengthBetween(5, 20)); break; case 5: jsonBuilder.value(randomBoolean()); @@ -293,7 +293,7 @@ public class RandomSearchRequestGenerator { jsonBuilder.value(randomShort()); break; case 8: - jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20))); + jsonBuilder.value(new Text(randomAlphaOfLengthBetween(5, 20))); break; } } @@ -322,13 +322,13 @@ public class RandomSearchRequestGenerator { } } if (randomBoolean()) { - builder.aggregation(AggregationBuilders.avg(randomAsciiOfLengthBetween(5, 20))); + 
builder.aggregation(AggregationBuilders.avg(randomAlphaOfLengthBetween(5, 20))); } if (randomBoolean()) { builder.ext(randomExtBuilders.get()); } if (randomBoolean()) { - String field = randomBoolean() ? null : randomAsciiOfLengthBetween(5, 20); + String field = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 20); int max = between(2, 1000); int id = randomInt(max-1); if (field == null) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index ad414db491f..cb271b25b3c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -183,12 +183,12 @@ public abstract class AbstractQueryTestCase> indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated).build(); - index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); + index = new Index(randomAlphaOfLengthBetween(1, 10), "_na_"); //create some random type with some default field, those types will stick around for all of the subclasses currentTypes = new String[randomIntBetween(0, 5)]; for (int i = 0; i < currentTypes.length; i++) { - String type = randomAsciiOfLengthBetween(1, 10); + String type = randomAlphaOfLengthBetween(1, 10); currentTypes[i] = type; } //set some random types to be queried as part the search request, before each test @@ -248,7 +248,7 @@ public abstract class AbstractQueryTestCase> * make sure query names are unique by suffixing them with increasing counter */ private static String createUniqueRandomName() { - String queryName = randomAsciiOfLengthBetween(1, 10) + queryNameId; + String queryName = randomAlphaOfLengthBetween(1, 10) + queryNameId; queryNameId++; return queryName; } @@ -597,8 +597,8 @@ public abstract class AbstractQueryTestCase> QB secondQuery = copyQuery(firstQuery); // query _name never should affect the result of toQuery, we randomly set it to make sure if (randomBoolean()) { - secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName() - + randomAsciiOfLengthBetween(1, 10)); + secondQuery.queryName(secondQuery.queryName() == null ? randomAlphaOfLengthBetween(1, 30) : secondQuery.queryName() + + randomAlphaOfLengthBetween(1, 10)); } searchContext = getSearchContext(randomTypes, context); Query secondLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); @@ -733,8 +733,8 @@ public abstract class AbstractQueryTestCase> private QB changeNameOrBoost(QB original) throws IOException { QB secondQuery = copyQuery(original); if (randomBoolean()) { - secondQuery.queryName(secondQuery.queryName() == null ? randomAsciiOfLengthBetween(1, 30) : secondQuery.queryName() - + randomAsciiOfLengthBetween(1, 10)); + secondQuery.queryName(secondQuery.queryName() == null ? 
randomAlphaOfLengthBetween(1, 30) : secondQuery.queryName() + + randomAlphaOfLengthBetween(1, 10)); } else { secondQuery.boost(original.boost() + 1f + randomFloat()); } @@ -773,7 +773,7 @@ public abstract class AbstractQueryTestCase> JsonStringEncoder encoder = JsonStringEncoder.getInstance(); value = new String(encoder.quoteAsString(randomUnicodeOfLength(10))); } else { - value = randomAsciiOfLengthBetween(1, 10); + value = randomAlphaOfLengthBetween(1, 10); } break; case INT_FIELD_NAME: @@ -789,7 +789,7 @@ public abstract class AbstractQueryTestCase> value = new DateTime(System.currentTimeMillis(), DateTimeZone.UTC).toString(); break; default: - value = randomAsciiOfLengthBetween(1, 10); + value = randomAlphaOfLengthBetween(1, 10); } return value; } @@ -798,7 +798,7 @@ public abstract class AbstractQueryTestCase> int terms = randomIntBetween(0, 3); StringBuilder builder = new StringBuilder(); for (int i = 0; i < terms; i++) { - builder.append(randomAsciiOfLengthBetween(1, 10)).append(" "); + builder.append(randomAlphaOfLengthBetween(1, 10)).append(" "); } return builder.toString().trim(); } @@ -809,7 +809,7 @@ public abstract class AbstractQueryTestCase> protected static String getRandomFieldName() { // if no type is set then return a random field name if (currentTypes.length == 0 || randomBoolean()) { - return randomAsciiOfLengthBetween(1, 10); + return randomAlphaOfLengthBetween(1, 10); } return randomFrom(MAPPED_LEAF_FIELD_NAMES); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index b4239ccd77a..db15ac0c335 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -705,7 +705,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } // 30% of the time if (randomInt(9) < 3) { - final String dataPath = randomAsciiOfLength(10); + final String dataPath = randomAlphaOfLength(10); logger.info("using custom data_path for index: [{}]", dataPath); builder.put(IndexMetaData.SETTING_DATA_PATH, dataPath); } @@ -1963,7 +1963,7 @@ public abstract class ESIntegTestCase extends ESTestCase { assert repoFiles.length > 0; Path path; do { - path = repoFiles[0].resolve(randomAsciiOfLength(10)); + path = repoFiles[0].resolve(randomAlphaOfLength(10)); } while (Files.exists(path)); return path; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index fa659e06fb2..587d390f300 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -572,11 +572,11 @@ public abstract class ESTestCase extends LuceneTestCase { return RandomPicks.randomFrom(random, collection); } - public static String randomAsciiOfLengthBetween(int minCodeUnits, int maxCodeUnits) { + public static String randomAlphaOfLengthBetween(int minCodeUnits, int maxCodeUnits) { return RandomizedTest.randomAsciiOfLengthBetween(minCodeUnits, maxCodeUnits); } - public static String randomAsciiOfLength(int codeUnits) { + public static String randomAlphaOfLength(int codeUnits) { return RandomizedTest.randomAsciiOfLength(codeUnits); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 85278358bec..cf9c2dc3515 
100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.env.Environment; @@ -83,8 +84,8 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; -import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.node.NodeService; +import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; @@ -129,6 +130,7 @@ import java.util.stream.Stream; import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; import static org.apache.lucene.util.LuceneTestCase.rarely; +import static org.elasticsearch.discovery.DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING; import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING; import static org.elasticsearch.test.ESTestCase.assertBusy; import static org.elasticsearch.test.ESTestCase.awaitBusy; @@ -589,14 +591,18 @@ public final class InternalTestCluster extends TestCluster { .put("node.name", name) .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), seed); - if (autoManageMinMasterNodes) { + final boolean usingSingleNodeDiscovery = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(finalSettings.build()).equals("single-node"); + if (!usingSingleNodeDiscovery && autoManageMinMasterNodes) { assert finalSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null : "min master nodes may not be set when auto managed"; + assert finalSettings.get(INITIAL_STATE_TIMEOUT_SETTING.getKey()) == null : + "automatically managing min master nodes requires nodes to complete a join cycle" + + " when starting"; finalSettings // don't wait too long not to slow down tests .put(ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.getKey(), "5s") .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), defaultMinMasterNodes); - } else if (finalSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null) { + } else if (!usingSingleNodeDiscovery && finalSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null) { throw new IllegalArgumentException(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " must be configured"); } MockNode node = new MockNode(finalSettings.build(), plugins); @@ -1079,7 +1085,7 @@ public final class InternalTestCluster extends TestCluster { } return true; }, 30, TimeUnit.SECONDS) == false) { - throw new IllegalStateException("cluster failed to from with expected nodes " + expectedNodes + " and actual nodes " + + throw new IllegalStateException("cluster failed to form with expected nodes " + expectedNodes + " and actual nodes " + client.admin().cluster().prepareState().get().getState().nodes()); } } catch (InterruptedException e) { diff --git
a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java index edc261c1759..298ca869ae5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruptionTests.java @@ -48,10 +48,10 @@ public class NetworkDisruptionTests extends ESTestCase { assertTrue(topology.disrupt(randomFrom(partition2), randomFrom(partition1))); assertFalse(topology.disrupt(randomFrom(partition1), randomFrom(partition1))); assertFalse(topology.disrupt(randomFrom(partition2), randomFrom(partition2))); - assertFalse(topology.disrupt(randomAsciiOfLength(10), randomFrom(partition1))); - assertFalse(topology.disrupt(randomAsciiOfLength(10), randomFrom(partition2))); - assertFalse(topology.disrupt(randomFrom(partition1), randomAsciiOfLength(10))); - assertFalse(topology.disrupt(randomFrom(partition2), randomAsciiOfLength(10))); + assertFalse(topology.disrupt(randomAlphaOfLength(10), randomFrom(partition1))); + assertFalse(topology.disrupt(randomAlphaOfLength(10), randomFrom(partition2))); + assertFalse(topology.disrupt(randomFrom(partition1), randomAlphaOfLength(10))); + assertFalse(topology.disrupt(randomFrom(partition2), randomAlphaOfLength(10))); } assertTrue(topology.getMajoritySide().size() >= topology.getMinoritySide().size()); } @@ -74,7 +74,7 @@ public class NetworkDisruptionTests extends ESTestCase { public void testBridge() { Set partition1 = generateRandomStringSet(1, 10); Set partition2 = generateRandomStringSet(1, 10); - String bridgeNode = randomAsciiOfLength(10); + String bridgeNode = randomAlphaOfLength(10); Bridge topology = new Bridge(bridgeNode, partition1, partition2); checkBridge(topology, bridgeNode, partition1, partition2); } @@ -97,12 +97,12 @@ public class NetworkDisruptionTests extends ESTestCase { assertFalse(topology.disrupt(randomFrom(partition2), randomFrom(partition2))); assertFalse(topology.disrupt(randomFrom(partition2), bridgeNode)); assertFalse(topology.disrupt(bridgeNode, randomFrom(partition2))); - assertFalse(topology.disrupt(randomAsciiOfLength(10), randomFrom(partition1))); - assertFalse(topology.disrupt(randomAsciiOfLength(10), randomFrom(partition2))); - assertFalse(topology.disrupt(randomAsciiOfLength(10), bridgeNode)); - assertFalse(topology.disrupt(randomFrom(partition1), randomAsciiOfLength(10))); - assertFalse(topology.disrupt(randomFrom(partition2), randomAsciiOfLength(10))); - assertFalse(topology.disrupt(bridgeNode, randomAsciiOfLength(10))); + assertFalse(topology.disrupt(randomAlphaOfLength(10), randomFrom(partition1))); + assertFalse(topology.disrupt(randomAlphaOfLength(10), randomFrom(partition2))); + assertFalse(topology.disrupt(randomAlphaOfLength(10), bridgeNode)); + assertFalse(topology.disrupt(randomFrom(partition1), randomAlphaOfLength(10))); + assertFalse(topology.disrupt(randomFrom(partition2), randomAlphaOfLength(10))); + assertFalse(topology.disrupt(bridgeNode, randomAlphaOfLength(10))); } } @@ -110,7 +110,7 @@ public class NetworkDisruptionTests extends ESTestCase { assert maxSize >= minSize; Set result = new HashSet<>(); for (int i = 0; i < minSize + randomInt(maxSize - minSize); i++) { - result.add(randomAsciiOfLength(10)); + result.add(randomAlphaOfLength(10)); } return result; } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java 
b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 1b2384ba5fc..f9e5ff8981e 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -20,6 +20,7 @@ package org.elasticsearch.transport; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -79,7 +80,7 @@ public class MockTcpTransport extends TcpTransport private final Set openChannels = new HashSet<>(); - static { + static { ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); builder.addConnections(1, TransportRequestOptions.Type.BULK, @@ -129,11 +130,7 @@ public class MockTcpTransport extends TcpTransport executor.execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { - try { - onException(serverMockChannel, e); - } catch (IOException ex) { - logger.warn("failed on handling exception", ex); - } + onException(serverMockChannel, e); } @Override @@ -242,15 +239,18 @@ public class MockTcpTransport extends TcpTransport } @Override - protected void sendMessage(MockChannel mockChannel, BytesReference reference, Runnable sendListener) throws IOException { - synchronized (mockChannel) { - final Socket socket = mockChannel.activeChannel; - OutputStream outputStream = new BufferedOutputStream(socket.getOutputStream()); - reference.writeTo(outputStream); - outputStream.flush(); - } - if (sendListener != null) { - sendListener.run(); + protected void sendMessage(MockChannel mockChannel, BytesReference reference, ActionListener listener) { + try { + synchronized (mockChannel) { + final Socket socket = mockChannel.activeChannel; + OutputStream outputStream = new BufferedOutputStream(socket.getOutputStream()); + reference.writeTo(outputStream); + outputStream.flush(); + } + listener.onResponse(mockChannel); + } catch (IOException e) { + listener.onFailure(e); + onException(mockChannel, e); } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java index 0e5c86863f9..cdcdc3c99c3 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/ESTestCaseTests.java @@ -80,7 +80,7 @@ public class ESTestCaseTests extends ESTestCase { Map result = new HashMap<>(); int entries = randomInt(10); for (int i = 0; i < entries; i++) { - String key = randomAsciiOfLengthBetween(5, 15); + String key = randomAlphaOfLengthBetween(5, 15); int suprise = randomIntBetween(0, 4); switch (suprise) { case 0: @@ -108,7 +108,7 @@ public class ESTestCaseTests extends ESTestCase { } } if (depth > 0) { - result.put(randomAsciiOfLengthBetween(5, 15), randomStringObjectMap(depth - 1)); + result.put(randomAlphaOfLengthBetween(5, 15), randomStringObjectMap(depth - 1)); } return result; } @@ -123,6 +123,6 @@ public class ESTestCaseTests extends ESTestCase { } public void testRandomUniqueNormalUsageAlwayMoreThanOne() { - assertThat(randomUnique(() -> randomAsciiOfLengthBetween(1, 20), 10), hasSize(greaterThan(0))); + assertThat(randomUnique(() -> randomAlphaOfLengthBetween(1, 20), 10), hasSize(greaterThan(0))); } } diff --git 
a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index fd4496a8678..0284a594883 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESTestCase; @@ -40,6 +41,7 @@ import org.elasticsearch.transport.TransportSettings; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -347,15 +349,19 @@ public class InternalTestClusterTests extends ESTestCase { public void testDifferentRolesMaintainPathOnRestart() throws Exception { final Path baseDir = createTempDir(); final int numNodes = 5; - InternalTestCluster cluster = new InternalTestCluster(randomLong(), baseDir, true, true, 0, 0, "test", - new NodeConfigurationSource() { + InternalTestCluster cluster = new InternalTestCluster(randomLong(), baseDir, false, + false, 0, 0, "test", new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numNodes) .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME) - .put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0).build(); + .put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0) + // speedup join timeout as setting initial state timeout to 0 makes split + // elections more likely + .put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "3s") + .build(); } @Override @@ -365,22 +371,32 @@ public class InternalTestClusterTests extends ESTestCase { } }, 0, randomBoolean(), "", Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class), Function.identity()); cluster.beforeTest(random(), 0.0); + List roles = new ArrayList<>(); + for (int i = 0; i < numNodes; i++) { + final DiscoveryNode.Role role = i == numNodes - 1 && roles.contains(MASTER) == false ? + MASTER : // last node and still no master + randomFrom(MASTER, DiscoveryNode.Role.DATA, DiscoveryNode.Role.INGEST); + roles.add(role); + } + + final Settings minMasterNodes = Settings.builder() + .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), + roles.stream().filter(role -> role == MASTER).count() / 2 + 1 + ).build(); try { Map> pathsPerRole = new HashMap<>(); for (int i = 0; i < numNodes; i++) { - final DiscoveryNode.Role role = i == numNodes -1 && pathsPerRole.containsKey(MASTER) == false ? 
- MASTER : // last noe and still no master ofr the cluster - randomFrom(MASTER, DiscoveryNode.Role.DATA, DiscoveryNode.Role.INGEST); + final DiscoveryNode.Role role = roles.get(i); final String node; switch (role) { case MASTER: - node = cluster.startMasterOnlyNode(Settings.EMPTY); + node = cluster.startMasterOnlyNode(minMasterNodes); break; case DATA: - node = cluster.startDataOnlyNode(Settings.EMPTY); + node = cluster.startDataOnlyNode(minMasterNodes); break; case INGEST: - node = cluster.startCoordinatingOnlyNode(Settings.EMPTY); + node = cluster.startCoordinatingOnlyNode(minMasterNodes); break; default: throw new IllegalStateException("get your story straight");
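One closing note on the InternalTestClusterTests rework above: with automatic min-master-nodes management turned off, the test now has to derive discovery.zen.minimum_master_nodes itself from the role list it pre-computes, using the quorum formula masterEligible / 2 + 1 that appears in the diff. A standalone sketch of just that arithmetic; the enum below is a stand-in for DiscoveryNode.Role:

    import java.util.Arrays;
    import java.util.List;

    class MinMasterNodesSketch {
        enum Role { MASTER, DATA, INGEST }

        /** Quorum of master-eligible nodes: floor(n / 2) + 1, e.g. three masters -> 2. */
        static long minimumMasterNodes(List<Role> roles) {
            long masterEligible = roles.stream().filter(role -> role == Role.MASTER).count();
            return masterEligible / 2 + 1;
        }

        public static void main(String[] args) {
            List<Role> roles = Arrays.asList(Role.MASTER, Role.DATA, Role.MASTER, Role.INGEST, Role.MASTER);
            System.out.println(minimumMasterNodes(roles)); // prints 2
        }
    }

Requiring a strict majority of the master-eligible nodes is the standard guard against split elections, the same concern the added "split elections more likely" comment in the test calls out.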