diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java index 591fa400d18..39a2bdfca09 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java @@ -46,10 +46,6 @@ public final class Allocators { private static class NoopGatewayAllocator extends GatewayAllocator { public static final NoopGatewayAllocator INSTANCE = new NoopGatewayAllocator(); - protected NoopGatewayAllocator() { - super(Settings.EMPTY); - } - @Override public void applyStartedShards(RoutingAllocation allocation, List startedShards) { // noop @@ -79,7 +75,7 @@ public final class Allocators { public static AllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings) throws InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException { - return new AllocationService(settings, + return new AllocationService( defaultAllocationDeciders(settings, clusterSettings), NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE); } @@ -88,7 +84,7 @@ public final class Allocators { IllegalAccessException, InvocationTargetException, InstantiationException, NoSuchMethodException { Collection deciders = ClusterModule.createAllocationDeciders(settings, clusterSettings, Collections.emptyList()); - return new AllocationDeciders(settings, deciders); + return new AllocationDeciders(deciders); } diff --git a/build.gradle b/build.gradle index a2b79d31bad..358895a6078 100644 --- a/build.gradle +++ b/build.gradle @@ -103,10 +103,6 @@ subprojects { * in a branch if there are only betas and rcs in the branch so we have * *something* to test against. 
*/ VersionCollection versions = new VersionCollection(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8')) -if (versions.currentVersion != VersionProperties.elasticsearch) { - throw new GradleException("The last version in Versions.java [${versions.currentVersion}] does not match " + - "VersionProperties.elasticsearch [${VersionProperties.elasticsearch}]") -} // build metadata from previous build, contains eg hashes for bwc builds String buildMetadataValue = System.getenv('BUILD_METADATA') @@ -140,26 +136,16 @@ task verifyVersions { if (gradle.startParameter.isOffline()) { throw new GradleException("Must run in online mode to verify versions") } - // Read the list from maven central - Node xml + // Read the list from maven central. + // Fetch the metadata and parse the xml into Version instances because it's more straightforward here + // rather than bwcVersion ( VersionCollection ). new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> - xml = new XmlParser().parse(s) - } - Set knownVersions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }.collect { Version.fromString(it) }) - - // Limit the known versions to those that should be index compatible, and are not future versions - knownVersions = knownVersions.findAll { it.major >= bwcVersions.currentVersion.major - 1 && it.before(VersionProperties.elasticsearch) } - - /* Limit the listed versions to those that have been marked as released. - * Versions not marked as released don't get the same testing and we want - * to make sure that we flip all unreleased versions to released as soon - * as possible after release. */ - Set actualVersions = new TreeSet<>(bwcVersions.indexCompatible.findAll { false == it.snapshot }) - - // Finally, compare! 
- if (knownVersions.equals(actualVersions) == false) { - throw new GradleException("out-of-date released versions\nActual :" + actualVersions + "\nExpected:" + knownVersions + - "\nUpdate Version.java. Note that Version.CURRENT doesn't count because it is not released.") + bwcVersions.compareToAuthoritative( + new XmlParser().parse(s) + .versioning.versions.version + .collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ } + .collect { Version.fromString(it) } + ) } } } @@ -251,20 +237,17 @@ subprojects { "org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator', "org.elasticsearch.plugin:rank-eval-client:${version}": ':modules:rank-eval', ] - - bwcVersions.snapshotProjectNames.each { snapshotName -> - Version snapshot = bwcVersions.getSnapshotForProject(snapshotName) - if (snapshot != null ) { - String snapshotProject = ":distribution:bwc:${snapshotName}" - project(snapshotProject).ext.bwcVersion = snapshot - ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${snapshot}"] = snapshotProject - ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${snapshot}"] = snapshotProject - ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${snapshot}"] = snapshotProject - if (snapshot.onOrAfter('6.3.0')) { - ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch-oss:${snapshot}"] = snapshotProject - ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch-oss:${snapshot}"] = snapshotProject - ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch-oss:${snapshot}"] = snapshotProject - } + // substitute unreleased versions with projects that check out and build locally + bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unreleasedVersion -> + Version unreleased = unreleasedVersion.version + String snapshotProject = ":distribution:bwc:${unreleasedVersion.gradleProjectName}" + 
ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${unreleased}"] = snapshotProject + ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${unreleased}"] = snapshotProject + ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${unreleased}"] = snapshotProject + if (unreleased.onOrAfter('6.3.0')) { + ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch-oss:${unreleased}"] = snapshotProject + ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch-oss:${unreleased}"] = snapshotProject + ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch-oss:${unreleased}"] = snapshotProject } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy deleted file mode 100644 index 063dcf7d3bb..00000000000 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy +++ /dev/null @@ -1,353 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.gradle - -import org.gradle.api.GradleException -import org.gradle.api.InvalidUserDataException - -import java.util.regex.Matcher - -/** - * The collection of version constants declared in Version.java, for use in BWC testing. - * - * if major+1 released: released artifacts from $version down to major-1.highestMinor.highestPatch, none of these should be snapshots, period. - * if major+1 unreleased: - * - if released: - * -- caveat 0: snapshot for the major-1.highestMinor.highestPatch - * - if unreleased: - * -- caveat 0: snapshot for the major-1.highestMinor.highestPatch - * -- caveat 1: every same major lower minor branch should also be tested if its released, and if not, its a snapshot. There should only be max 2 of these. - * -- caveat 2: the largest released minor branch before the unreleased minor should also be a snapshot - * -- caveat 3: if the current version is a different major than the previous rules apply to major - 1 of the current version - * - * Please note that the caveat's also correspond with the 4 types of snapshots. - * - Caveat 0 - always maintenanceBugfixSnapshot. - * - Caveat 1 - This is tricky. If caveat 3 applies, the highest matching value is nextMinorSnapshot, if there is another it is the stagedMinorSnapshot. - * If caveat 3 does not apply then the only possible value is the stagedMinorSnapshot. - * - Caveat 2 - always nextBugfixSnapshot - * - Caveat 3 - this only changes the applicability of Caveat 1 - * - * Notes on terminology: - * - The case for major+1 being released is accomplished through the isReleasableBranch value. If this is false, then the branch is no longer - * releasable, meaning not to test against any snapshots. - * - Released is defined as having > 1 suffix-free version in a major.minor series. For instance, only 6.2.0 means unreleased, but a - * 6.2.0 and 6.2.1 mean that 6.2.0 was released already. 
- */ -class VersionCollection { - - private final List versions - Version nextMinorSnapshot - Version stagedMinorSnapshot - Version nextBugfixSnapshot - Version maintenanceBugfixSnapshot - final Version currentVersion - private final TreeSet versionSet = new TreeSet<>() - final List snapshotProjectNames = ['next-minor-snapshot', - 'staged-minor-snapshot', - 'next-bugfix-snapshot', - 'maintenance-bugfix-snapshot'] - - // When we roll 8.0 its very likely these will need to be extracted from this class - private final boolean isReleasableBranch = true - - /** - * Construct a VersionCollection from the lines of the Version.java file. The basic logic for the following is pretty straight forward. - - * @param versionLines The lines of the Version.java file. - */ - VersionCollection(List versionLines) { - final boolean buildSnapshot = System.getProperty("build.snapshot", "true") == "true" - - List versions = [] - // This class should be converted wholesale to use the treeset - - for (final String line : versionLines) { - final Matcher match = line =~ /\W+public static final Version V_(\d+)_(\d+)_(\d+)(_alpha\d+|_beta\d+|_rc\d+)? 
.*/ - if (match.matches()) { - final Version foundVersion = new Version( - Integer.parseInt(match.group(1)), Integer.parseInt(match.group(2)), - Integer.parseInt(match.group(3)), (match.group(4) ?: '').replace('_', '-'), false) - safeAddToSet(foundVersion) - } - } - - if (versionSet.empty) { - throw new GradleException("Unexpectedly found no version constants in Versions.java") - } - - // If the major version has been released, then remove all of the alpha/beta/rc versions that exist in the set - versionSet.removeAll { it.suffix.isEmpty() == false && isMajorReleased(it, versionSet) } - - // set currentVersion - Version lastVersion = versionSet.last() - currentVersion = new Version(lastVersion.major, lastVersion.minor, lastVersion.revision, lastVersion.suffix, buildSnapshot) - - // remove all of the potential alpha/beta/rc from the currentVersion - versionSet.removeAll { - it.suffix.isEmpty() == false && - it.major == currentVersion.major && - it.minor == currentVersion.minor && - it.revision == currentVersion.revision } - - // re-add the currentVersion to the set - versionSet.add(currentVersion) - - if (isReleasableBranch) { - if (isReleased(currentVersion)) { - // caveat 0 - if the minor has been released then it only has a maintenance version - // go back 1 version to get the last supported snapshot version of the line, which is a maint bugfix - Version highestMinor = getHighestPreviousMinor(currentVersion.major) - maintenanceBugfixSnapshot = replaceAsSnapshot(highestMinor) - } else { - // caveat 3 - if our currentVersion is a X.0.0, we need to check X-1 minors to see if they are released - if (currentVersion.minor == 0) { - for (Version version: getMinorTips(currentVersion.major - 1)) { - if (isReleased(version) == false) { - // caveat 1 - This should only ever contain 2 non released branches in flight. An example is 6.x is frozen, - // and 6.2 is cut but not yet released there is some simple logic to make sure that in the case of more than 2, - // it will bail. 
The order is that the minor snapshot is fulfilled first, and then the staged minor snapshot - if (nextMinorSnapshot == null) { - // it has not been set yet - nextMinorSnapshot = replaceAsSnapshot(version) - } else if (stagedMinorSnapshot == null) { - stagedMinorSnapshot = replaceAsSnapshot(version) - } else { - throw new GradleException("More than 2 snapshot version existed for the next minor and staged (frozen) minors.") - } - } else { - // caveat 2 - this is the last minor snap for this major, so replace the highest (last) one of these and break - nextBugfixSnapshot = replaceAsSnapshot(version) - // we only care about the largest minor here, so in the case of 6.1 and 6.0, it will only get 6.1 - break - } - } - // caveat 0 - the last supported snapshot of the line is on a version that we don't support (N-2) - maintenanceBugfixSnapshot = null - } else { - // caveat 3 did not apply. version is not a X.0.0, so we are somewhere on a X.Y line - // only check till minor == 0 of the major - for (Version version: getMinorTips(currentVersion.major)) { - if (isReleased(version) == false) { - // caveat 1 - This should only ever contain 0 or 1 branch in flight. 
An example is 6.x is frozen, and 6.2 is cut - // but not yet released there is some simple logic to make sure that in the case of more than 1, it will bail - if (stagedMinorSnapshot == null) { - stagedMinorSnapshot = replaceAsSnapshot(version) - } else { - throw new GradleException("More than 1 snapshot version existed for the staged (frozen) minors.") - } - } else { - // caveat 2 - this is the last minor snap for this major, so replace the highest (last) one of these and break - nextBugfixSnapshot = replaceAsSnapshot(version) - // we only care about the largest minor here, so in the case of 6.1 and 6.0, it will only get 6.1 - break - } - } - // caveat 0 - now dip back 1 version to get the last supported snapshot version of the line - Version highestMinor = getHighestPreviousMinor(currentVersion.major) - maintenanceBugfixSnapshot = replaceAsSnapshot(highestMinor) - } - } - } - - this.versions = Collections.unmodifiableList(versionSet.toList()) - } - - /** - * @return The list of versions read from the Version.java file - */ - List getVersions() { - return versions - } - - /** - * Index compat supports 1 previous entire major version. For instance, any 6.x test for this would test all of 5 up to that 6.x version - * - * @return All earlier versions that should be tested for index BWC with the current version. - */ - List getIndexCompatible() { - int actualMajor = (currentVersion.major == 5 ? 
2 : currentVersion.major - 1) - return versionSet - .tailSet(Version.fromString("${actualMajor}.0.0")) - .headSet(currentVersion) - .asList() - } - - /** - * Ensures the types of snapshot are not null and are also in the index compat list - */ - List getSnapshotsIndexCompatible() { - List compatSnapshots = [] - List allCompatVersions = getIndexCompatible() - if (allCompatVersions.contains(nextMinorSnapshot)) { - compatSnapshots.add(nextMinorSnapshot) - } - if (allCompatVersions.contains(stagedMinorSnapshot)) { - compatSnapshots.add(stagedMinorSnapshot) - } - if (allCompatVersions.contains(nextBugfixSnapshot)) { - compatSnapshots.add(nextBugfixSnapshot) - } - if (allCompatVersions.contains(maintenanceBugfixSnapshot)) { - compatSnapshots.add(maintenanceBugfixSnapshot) - } - - return compatSnapshots - } - - /** - * Wire compat supports the last minor of the previous major. For instance, any 6.x test would test 5.6 up to that 6.x version - * - * @return All earlier versions that should be tested for wire BWC with the current version. 
- */ - List getWireCompatible() { - // Get the last minor of the previous major - Version lowerBound = getHighestPreviousMinor(currentVersion.major) - return versionSet - .tailSet(Version.fromString("${lowerBound.major}.${lowerBound.minor}.0")) - .headSet(currentVersion) - .toList() - } - - /** - * Ensures the types of snapshot are not null and are also in the wire compat list - */ - List getSnapshotsWireCompatible() { - List compatSnapshots = [] - List allCompatVersions = getWireCompatible() - if (allCompatVersions.contains(nextMinorSnapshot)) { - compatSnapshots.add(nextMinorSnapshot) - } - if (allCompatVersions.contains(stagedMinorSnapshot)) { - compatSnapshots.add(stagedMinorSnapshot) - } - if (allCompatVersions.contains(nextBugfixSnapshot)) { - compatSnapshots.add(nextBugfixSnapshot) - } - if (allCompatVersions.contains(maintenanceBugfixSnapshot)) { - compatSnapshots.add(maintenanceBugfixSnapshot) - } - // There was no wire compat for the 2.x line - compatSnapshots.removeAll {it.major == 2} - - return compatSnapshots - } - - /** - * Grabs the proper snapshot based on the name passed in. These names should correspond with gradle project names under bwc. If you - * are editing this if/else it is only because you added another project under :distribution:bwc. Do not modify this method or its - * reasoning for throwing the exception unless you are sure that it will not harm :distribution:bwc. 
- */ - Version getSnapshotForProject(String snapshotProjectName) { - if (snapshotProjectName == 'next-minor-snapshot') { - return nextMinorSnapshot - } else if (snapshotProjectName == 'staged-minor-snapshot') { - return stagedMinorSnapshot - } else if (snapshotProjectName == 'maintenance-bugfix-snapshot') { - return maintenanceBugfixSnapshot - } else if (snapshotProjectName == 'next-bugfix-snapshot') { - return nextBugfixSnapshot - } else { - throw new InvalidUserDataException("Unsupported project name ${snapshotProjectName}") - } - } - - /** - * Uses basic logic about our releases to determine if this version has been previously released - */ - private boolean isReleased(Version version) { - return version.revision > 0 - } - - /** - * Validates that the count of non suffixed (alpha/beta/rc) versions in a given major to major+1 is greater than 1. - * This means that there is more than just a major.0.0 or major.0.0-alpha in a branch to signify it has been prevously released. - */ - private boolean isMajorReleased(Version version, TreeSet items) { - return items - .tailSet(Version.fromString("${version.major}.0.0")) - .headSet(Version.fromString("${version.major + 1}.0.0")) - .count { it.suffix.isEmpty() } // count only non suffix'd versions as actual versions that may be released - .intValue() > 1 - } - - /** - * Gets the largest version previous major version based on the nextMajorVersion passed in. - * If you have a list [5.0.2, 5.1.2, 6.0.1, 6.1.1] and pass in 6 for the nextMajorVersion, it will return you 5.1.2 - */ - private Version getHighestPreviousMinor(Integer nextMajorVersion) { - SortedSet result = versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0")) - return result.isEmpty() ? 
null : result.last() - } - - /** - * Helper function for turning a version into a snapshot version, removing and readding it to the tree - */ - private Version replaceAsSnapshot(Version version) { - versionSet.remove(version) - Version snapshotVersion = new Version(version.major, version.minor, version.revision, version.suffix, true) - safeAddToSet(snapshotVersion) - return snapshotVersion - } - - /** - * Safely adds a value to the treeset, or bails if the value already exists. - * @param version - */ - private void safeAddToSet(Version version) { - if (versionSet.add(version) == false) { - throw new GradleException("Versions.java contains duplicate entries for ${version}") - } - } - - /** - * Gets the entire set of major.minor.* given those parameters. - */ - private SortedSet getMinorSetForMajor(Integer major, Integer minor) { - return versionSet - .tailSet(Version.fromString("${major}.${minor}.0")) - .headSet(Version.fromString("${major}.${minor + 1}.0")) - } - - /** - * Gets the entire set of major.* to the currentVersion - */ - private SortedSet getMajorSet(Integer major) { - return versionSet - .tailSet(Version.fromString("${major}.0.0")) - .headSet(currentVersion) - } - - /** - * Gets the tip of each minor set and puts it in a list. 
- * - * examples: - * [1.0.0, 1.1.0, 1.1.1, 1.2.0, 1.3.1] will return [1.0.0, 1.1.1, 1.2.0, 1.3.1] - * [1.0.0, 1.0.1, 1.0.2, 1.0.3, 1.0.4] will return [1.0.4] - */ - private List getMinorTips(Integer major) { - TreeSet majorSet = getMajorSet(major) - List minorList = new ArrayList<>() - for (int minor = majorSet.last().minor; minor >= 0; minor--) { - TreeSet minorSetInMajor = getMinorSetForMajor(major, minor) - minorList.add(minorSetInMajor.last()) - } - return minorList - } -} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/Version.java b/buildSrc/src/main/java/org/elasticsearch/gradle/Version.java index 53855716840..04e884d8818 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/Version.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/Version.java @@ -21,6 +21,10 @@ public final class Version implements Comparable { private static final Pattern pattern = Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-alpha\\d+|-beta\\d+|-rc\\d+)?(-SNAPSHOT)?"); + public Version(int major, int minor, int revision) { + this(major, minor, revision, "", false); + } + public Version(int major, int minor, int revision, String suffix, boolean snapshot) { Objects.requireNonNull(major, "major version can't be null"); Objects.requireNonNull(minor, "minor version can't be null"); @@ -31,25 +35,8 @@ public final class Version implements Comparable { this.snapshot = snapshot; this.suffix = suffix == null ? 
"" : suffix; - int suffixOffset = 0; - if (this.suffix.isEmpty()) { - // no suffix will be considered smaller, uncomment to change that - // suffixOffset = 100; - } else { - if (this.suffix.contains("alpha")) { - suffixOffset += parseSuffixNumber(this.suffix.substring(6)); - } else if (this.suffix.contains("beta")) { - suffixOffset += 25 + parseSuffixNumber(this.suffix.substring(5)); - } else if (this.suffix.contains("rc")) { - suffixOffset += 50 + parseSuffixNumber(this.suffix.substring(3)); - } - else { - throw new IllegalArgumentException("Suffix must contain one of: alpha, beta or rc"); - } - } - // currently snapshot is not taken into account - this.id = major * 10000000 + minor * 100000 + revision * 1000 + suffixOffset * 10 /*+ (snapshot ? 1 : 0)*/; + this.id = major * 10000000 + minor * 100000 + revision * 1000; } private static int parseSuffixNumber(String substring) { @@ -136,10 +123,7 @@ public final class Version implements Comparable { Version version = (Version) o; return major == version.major && minor == version.minor && - revision == version.revision && - id == version.id && - snapshot == version.snapshot && - Objects.equals(suffix, version.suffix); + revision == version.revision; } @Override @@ -176,4 +160,5 @@ public final class Version implements Comparable { public int compareTo(Version other) { return Integer.compare(getId(), other.getId()); } + } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java b/buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java new file mode 100644 index 00000000000..5b82e0d942b --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/VersionCollection.java @@ -0,0 +1,341 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.gradle; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Consumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.Collections.emptyList; +import static java.util.Collections.unmodifiableList; + +/** + * A container for elasticsearch supported version information used in BWC testing. + * + * Parse the Java source file containing the versions declarations and use the known rules to figure out which are all + * the version the current one is wire and index compatible with. + * On top of this, figure out which of these are unreleased and provide the branch they can be built from. + * + * Note that in this context, currentVersion is the unreleased version this build operates on. + * At any point in time there will surely be four such unreleased versions being worked on, + * thus currentVersion will be one of these. + * + * Considering: + *
+ * <dl>
+ *     <dt>M, M > 0</dt>
+ *     <dd>last released major</dd>
+ *     <dt>N, N > 0</dt>
+ *     <dd>last released minor</dd>
+ * </dl>
+ *
+ * <ul>
+ *     <li>the unreleased major, M+1.0.0 on the `master` branch</li>
+ *     <li>the unreleased minor, M.N.0 on the `M.x` (x is literal) branch</li>
+ *     <li>the unreleased bugfix, M.N.c (c > 0) on the `M.b` branch</li>
+ *     <li>the unreleased maintenance, M-1.d.e ( d > 0, e > 0) on the `(M-1).d` branch</li>
+ * </ul>
+ *
+ * In addition to these, there will be a fifth one when a minor reaches feature freeze, we call this the staged
+ * version:
+ * <ul>
+ *     <li>the unreleased staged, M.N-2.0 (N > 2) on the `M.(N-2)` branch</li>
+ * </ul>
+ * + * Each build is only concerned with versions before it, as those are the ones that need to be tested + * for backwards compatibility. We never look forward, and don't add forward facing version number to branches of previous + * version. + * + * Each branch has a current version, and expected compatible versions are parsed from the server code's Version` class. + * We can reliably figure out which the unreleased versions are due to the convention of always adding the next unreleased + * version number to server in all branches when a version is released. + * E.x when M.N.c is released M.N.c+1 is added to the Version class mentioned above in all the following branches: + * `M.b`, `M.x` and `master` so we can reliably assume that the leafs of the version tree are unreleased. + * This convention is enforced by checking the versions we consider to be unreleased against an + * authoritative source (maven central). + * We are then able to map the unreleased version to branches in git and Gradle projects that are capable of checking + * out and building them, so we can include these in the testing plan as well. + */ +public class VersionCollection { + + private static final Pattern LINE_PATTERN = Pattern.compile( + "\\W+public static final Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)? 
.*" + ); + + private final Version currentVersion; + private final Map> groupByMajor; + + public class UnreleasedVersionInfo { + public final Version version; + public final String branch; + public final String gradleProjectName; + + UnreleasedVersionInfo(Version version, String branch, String gradleProjectName) { + this.version = version; + this.branch = branch; + this.gradleProjectName = gradleProjectName; + } + } + + public VersionCollection(List versionLines) { + this(versionLines, VersionProperties.getElasticsearch()); + } + + protected VersionCollection(List versionLines, Version currentVersionProperty) { + groupByMajor = versionLines.stream() + .map(LINE_PATTERN::matcher) + .filter(Matcher::matches) + .map(match -> new Version( + Integer.parseInt(match.group(1)), + Integer.parseInt(match.group(2)), + Integer.parseInt(match.group(3)), + (match.group(4) == null ? "" : match.group(4)).replace('_', '-'), + false + )) + .sorted() + .filter(version -> version.getSuffix().isEmpty() || version.equals(currentVersionProperty)) + .collect(Collectors.groupingBy(Version::getMajor, Collectors.toList())); + + if (groupByMajor.isEmpty()) { + throw new IllegalArgumentException("Could not parse any versions"); + } + + currentVersion = getLatestVersionByKey( + groupByMajor, + groupByMajor.keySet().stream().max(Integer::compareTo) + .orElseThrow(() -> new IllegalStateException("Unexpected number of versions in collection")) + ); + + assertCurrentVersionMatchesParsed(currentVersionProperty); + + assertNoOlderThanTwoMajors(); + + markUnreleasedAsSnapshot(); + } + + private void markUnreleasedAsSnapshot() { + getUnreleased().forEach(uv -> + groupByMajor.get(uv.getMajor()).set( + groupByMajor.get(uv.getMajor()).indexOf(uv), + new Version(uv.getMajor(), uv.getMinor(), uv.getRevision(),uv.getSuffix(), true) + ) + ); + } + + private void assertNoOlderThanTwoMajors() { + Set majors = groupByMajor.keySet(); + if (majors.size() != 2 && currentVersion.getMinor() != 0 && 
currentVersion.getMajor() != 0) { + throw new IllegalStateException( + "Expected exactly 2 majors in parsed versions but found: " + majors + ); + } + } + + private void assertCurrentVersionMatchesParsed(Version currentVersionProperty) { + if (currentVersionProperty.equals(currentVersion) == false) { + throw new IllegalStateException( + "Parsed versions latest version does not match the one configured in build properties. " + + "Parsed latest version is " + currentVersion + " but the build has " + + currentVersionProperty + ); + } + } + + public void forPreviousUnreleased(Consumer consumer) { + getUnreleased().stream() + .filter(version -> version.equals(currentVersion) == false) + .forEach(version -> consumer.accept( + new UnreleasedVersionInfo( + version, + getBranchFor(version), + getGradleProjectNameFor(version) + ) + )); + } + + private String getGradleProjectNameFor(Version version) { + if (version.equals(currentVersion)) { + throw new IllegalArgumentException("The Gradle project to build " + version + " is the current build."); + } + Map> releasedMajorGroupedByMinor = getReleasedMajorGroupedByMinor(); + + if (version.getRevision() == 0) { + if (releasedMajorGroupedByMinor + .get(releasedMajorGroupedByMinor.keySet().stream().max(Integer::compareTo).orElse(0)) + .contains(version)) { + return "minor"; + } else { + return "staged"; + } + } else { + if (releasedMajorGroupedByMinor + .getOrDefault(version.getMinor(), emptyList()) + .contains(version)) { + return "bugfix"; + } else { + return "maintenance"; + } + } + } + + private String getBranchFor(Version version) { + switch (getGradleProjectNameFor(version)) { + case "minor": + return version.getMajor() + ".x"; + case "staged": + case "maintenance": + case "bugfix": + return version.getMajor() + "." 
+ version.getMinor(); + default: + throw new IllegalStateException("Unexpected Gradle project name"); + } + } + + public List getUnreleased() { + List unreleased = new ArrayList<>(); + // The current version is being worked, is always unreleased + unreleased.add(currentVersion); + + // the tip of the previous major is unreleased for sure, be it a minor or a bugfix + unreleased.add(getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1)); + + final Map> groupByMinor = getReleasedMajorGroupedByMinor(); + int greatestMinor = groupByMinor.keySet().stream().max(Integer::compareTo).orElse(0); + + // the last bugfix for this minor series is always unreleased + unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor)); + + if (groupByMinor.get(greatestMinor).size() == 1) { + // we found an unreleased minor + unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor - 1)); + if (groupByMinor.getOrDefault(greatestMinor - 1, emptyList()).size() == 1) { + // we found that the previous minor is staged but not yet released + // in this case, the minor before that has a bugfix + unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor - 2)); + } + } + + return unmodifiableList( + unreleased.stream() + .sorted() + .distinct() + .collect(Collectors.toList()) + ); + } + + private Version getLatestVersionByKey(Map> groupByMajor, int key) { + return groupByMajor.getOrDefault(key, emptyList()).stream() + .max(Version::compareTo) + .orElseThrow(() -> new IllegalStateException("Unexpected number of versions in collection")); + } + + private Map> getReleasedMajorGroupedByMinor() { + List currentMajorVersions = groupByMajor.get(currentVersion.getMajor()); + List previousMajorVersions = groupByMajor.get(currentVersion.getMajor() - 1); + + final Map> groupByMinor; + if (currentMajorVersions.size() == 1) { + // Current is an unreleased major: x.0.0 so we have to look for other unreleased versions in the previous major + groupByMinor = 
previousMajorVersions.stream() + .collect(Collectors.groupingBy(Version::getMinor, Collectors.toList())); + } else { + groupByMinor = currentMajorVersions.stream() + .collect(Collectors.groupingBy(Version::getMinor, Collectors.toList())); + } + return groupByMinor; + } + + public void compareToAuthoritative(List authoritativeReleasedVersions) { + Set notReallyReleased = new HashSet<>(getReleased()); + notReallyReleased.removeAll(authoritativeReleasedVersions); + if (notReallyReleased.isEmpty() == false) { + throw new IllegalStateException( + "out-of-date released versions" + + "\nFollowing versions are not really released, but the build thinks they are: " + notReallyReleased + ); + } + + Set incorrectlyConsideredUnreleased = new HashSet<>(authoritativeReleasedVersions); + incorrectlyConsideredUnreleased.retainAll(getUnreleased()); + if (incorrectlyConsideredUnreleased.isEmpty() == false) { + throw new IllegalStateException( + "out-of-date released versions" + + "\nBuild considers versions unreleased, " + + "but they are released according to an authoritative source: " + incorrectlyConsideredUnreleased + + "\nThe next versions probably needs to be added to Version.java (CURRENT doesn't count)." 
+ ); + } + } + + private List getReleased() { + List unreleased = getUnreleased(); + return groupByMajor.values().stream() + .flatMap(Collection::stream) + .filter(each -> unreleased.contains(each) == false) + .collect(Collectors.toList()); + } + + public List getIndexCompatible() { + return unmodifiableList( + Stream.concat( + groupByMajor.get(currentVersion.getMajor() - 1).stream(), + groupByMajor.get(currentVersion.getMajor()).stream() + ) + .filter(version -> version.equals(currentVersion) == false) + .collect(Collectors.toList()) + ); + } + + public List getWireCompatible() { + List wireCompat = new ArrayList<>(); + + List prevMajors = groupByMajor.get(currentVersion.getMajor() - 1); + int minor = prevMajors.get(prevMajors.size() - 1).getMinor(); + for (int i = prevMajors.size() - 1; + i > 0 && prevMajors.get(i).getMinor() == minor; + i-- + ) { + wireCompat.add(prevMajors.get(i)); + } + wireCompat.addAll(groupByMajor.get(currentVersion.getMajor())); + wireCompat.remove(currentVersion); + wireCompat.sort(Version::compareTo); + + return unmodifiableList(wireCompat); + } + + public List getUnreleasedIndexCompatible() { + List unreleasedIndexCompatible = new ArrayList<>(getIndexCompatible()); + unreleasedIndexCompatible.retainAll(getUnreleased()); + return unmodifiableList(unreleasedIndexCompatible); + } + + public List getUnreleasedWireCompatible() { + List unreleasedWireCompatible = new ArrayList<>(getWireCompatible()); + unreleasedWireCompatible.retainAll(getUnreleased()); + return unmodifiableList(unreleasedWireCompatible); + } + +} diff --git a/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy b/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy deleted file mode 100644 index f6b9cb5fc95..00000000000 --- a/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy +++ /dev/null @@ -1,236 +0,0 @@ -package org.elasticsearch.gradle - -import 
org.elasticsearch.gradle.test.GradleUnitTestCase -import org.junit.Test - -class VersionCollectionTests extends GradleUnitTestCase { - - String formatVersion(String version) { - return " public static final Version V_${version.replaceAll("\\.", "_")} " - } - List allVersions = [formatVersion('5.0.0'), formatVersion('5.0.0_alpha1'), formatVersion('5.0.0_alpha2'), formatVersion('5.0.0_beta1'), - formatVersion('5.0.0_rc1'),formatVersion('5.0.0_rc2'),formatVersion('5.0.1'), formatVersion('5.0.2'), - formatVersion('5.1.1'), formatVersion('5.1.2'), formatVersion('5.2.0'), formatVersion('5.2.1'), formatVersion('6.0.0'), - formatVersion('6.0.1'), formatVersion('6.1.0'), formatVersion('6.1.1'), formatVersion('6.2.0'), formatVersion('6.3.0'), - formatVersion('7.0.0_alpha1'), formatVersion('7.0.0_alpha2')] - - /** - * This validates the logic of being on a unreleased major branch with a staged major-1.minor sibling. This case happens when a version is - * branched from Major-1.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior - * from 7.0 perspective, or master at the time of this writing. 
- */ - @Test - void testAgainstMajorUnreleasedWithExistingStagedMinorRelease() { - VersionCollection vc = new VersionCollection(allVersions) - assertNotNull(vc) - assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT")) - assertEquals(vc.stagedMinorSnapshot, Version.fromString("6.2.0-SNAPSHOT")) - assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.1.1-SNAPSHOT")) - assertNull(vc.maintenanceBugfixSnapshot) - - vc.indexCompatible.containsAll(vc.versions) - - // This should contain the same list sans the current version - List indexCompatList = [Version.fromString("6.0.0"), Version.fromString("6.0.1"), - Version.fromString("6.1.0"), Version.fromString("6.1.1-SNAPSHOT"), - Version.fromString("6.2.0-SNAPSHOT"), Version.fromString("6.3.0-SNAPSHOT")] - assertTrue(indexCompatList.containsAll(vc.indexCompatible)) - assertTrue(vc.indexCompatible.containsAll(indexCompatList)) - - List wireCompatList = [Version.fromString("6.3.0-SNAPSHOT")] - assertTrue(wireCompatList.containsAll(vc.wireCompatible)) - assertTrue(vc.wireCompatible.containsAll(wireCompatList)) - - assertEquals(vc.snapshotsIndexCompatible.size(), 3) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.3.0-SNAPSHOT"))) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.0-SNAPSHOT"))) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.1.1-SNAPSHOT"))) - - assertEquals(vc.snapshotsWireCompatible.size(), 1) - assertEquals(vc.snapshotsWireCompatible.first(), Version.fromString("6.3.0-SNAPSHOT")) - } - - /** - * This validates the logic of being on a unreleased major branch without a staged major-1.minor sibling. This case happens once a staged, - * unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test - * simulates the behavior from 7.0 perspective, or master at the time of this writing. 
- */ - @Test - void testAgainstMajorUnreleasedWithoutStagedMinorRelease() { - List localVersion = allVersions.clone() - localVersion.add(formatVersion('6.2.1')) // release 6.2 - - VersionCollection vc = new VersionCollection(localVersion) - assertNotNull(vc) - assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT")) - assertEquals(vc.stagedMinorSnapshot, null) - assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.2.1-SNAPSHOT")) - assertNull(vc.maintenanceBugfixSnapshot) - - vc.indexCompatible.containsAll(vc.versions) - - // This should contain the same list sans the current version - List indexCompatList = [Version.fromString("6.0.0"), Version.fromString("6.0.1"), - Version.fromString("6.1.0"), Version.fromString("6.1.1"), - Version.fromString("6.2.0"), Version.fromString("6.2.1-SNAPSHOT"), - Version.fromString("6.3.0-SNAPSHOT")] - assertTrue(indexCompatList.containsAll(vc.indexCompatible)) - assertTrue(vc.indexCompatible.containsAll(indexCompatList)) - - List wireCompatList = [Version.fromString("6.3.0-SNAPSHOT")] - assertTrue(wireCompatList.containsAll(vc.wireCompatible)) - assertTrue(vc.wireCompatible.containsAll(wireCompatList)) - - assertEquals(vc.snapshotsIndexCompatible.size(), 2) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.3.0-SNAPSHOT"))) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.1-SNAPSHOT"))) - - assertEquals(vc.snapshotsWireCompatible.size(), 1) - assertEquals(vc.snapshotsWireCompatible.first(), Version.fromString("6.3.0-SNAPSHOT")) - } - - /** - * This validates the logic of being on a unreleased minor branch with a staged minor sibling. This case happens when a version is - * branched from Major.x At the time of this writing 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior - * from 6.3 perspective. 
- */ - @Test - void testAgainstMinorReleasedBranch() { - List localVersion = allVersions.clone() - localVersion.removeAll { it.toString().contains('7_0_0')} // remove all the 7.x so that the actual version is 6.3 (6.x) - VersionCollection vc = new VersionCollection(localVersion) - assertNotNull(vc) - assertEquals(vc.nextMinorSnapshot, null) - assertEquals(vc.stagedMinorSnapshot, Version.fromString("6.2.0-SNAPSHOT")) - assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.1.1-SNAPSHOT")) - assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT")) - - // This should contain the same list sans the current version - List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1) - assertTrue(indexCompatList.containsAll(vc.indexCompatible)) - assertTrue(vc.indexCompatible.containsAll(indexCompatList)) - - List wireCompatList = [Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"), - Version.fromString("6.0.1"), Version.fromString("6.1.0"), Version.fromString("6.1.1-SNAPSHOT"), - Version.fromString("6.2.0-SNAPSHOT")] - assertTrue(wireCompatList.containsAll(vc.wireCompatible)) - assertTrue(vc.wireCompatible.containsAll(wireCompatList)) - - assertEquals(vc.snapshotsIndexCompatible.size(), 3) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.0-SNAPSHOT"))) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.1.1-SNAPSHOT"))) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("5.2.1-SNAPSHOT"))) - - assertEquals(vc.snapshotsWireCompatible.size(), 3) - assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("6.2.0-SNAPSHOT"))) - assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("6.1.1-SNAPSHOT"))) - assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("5.2.1-SNAPSHOT"))) - } - - /** - * This validates the logic of being on a unreleased minor branch without a staged minor sibling. 
This case happens once a staged, - * unreleased minor is released. At the time of this writing 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test - * simulates the behavior from 6.3 perspective. - */ - @Test - void testAgainstMinorReleasedBranchNoStagedMinor() { - List localVersion = allVersions.clone() - // remove all the 7.x and add a 6.2.1 which means 6.2 was released - localVersion.removeAll { it.toString().contains('7_0_0')} - localVersion.add(formatVersion('6.2.1')) - VersionCollection vc = new VersionCollection(localVersion) - assertNotNull(vc) - assertEquals(vc.nextMinorSnapshot, null) - assertEquals(vc.stagedMinorSnapshot, null) - assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.2.1-SNAPSHOT")) - assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT")) - - // This should contain the same list sans the current version - List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1) - assertTrue(indexCompatList.containsAll(vc.indexCompatible)) - assertTrue(vc.indexCompatible.containsAll(indexCompatList)) - - List wireCompatList = [Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"), - Version.fromString("6.0.1"), Version.fromString("6.1.0"), Version.fromString("6.1.1"), - Version.fromString("6.2.0"), Version.fromString("6.2.1-SNAPSHOT")] - assertTrue(wireCompatList.containsAll(vc.wireCompatible)) - assertTrue(vc.wireCompatible.containsAll(wireCompatList)) - - assertEquals(vc.snapshotsIndexCompatible.size(), 2) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.1-SNAPSHOT"))) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("5.2.1-SNAPSHOT"))) - - assertEquals(vc.snapshotsWireCompatible.size(), 2) - assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("6.2.1-SNAPSHOT"))) - assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("5.2.1-SNAPSHOT"))) - } - - /** - * This validates the logic of 
being on a released minor branch. At the time of writing, 6.2 is unreleased, so this is equivalent of being - * on 6.1. - */ - @Test - void testAgainstOldMinor() { - - List localVersion = allVersions.clone() - // remove the 7 alphas and the ones greater than 6.1 - localVersion.removeAll { it.toString().contains('7_0_0') || it.toString().contains('V_6_2') || it.toString().contains('V_6_3') } - VersionCollection vc = new VersionCollection(localVersion) - assertNotNull(vc) - assertEquals(vc.nextMinorSnapshot, null) - assertEquals(vc.stagedMinorSnapshot, null) - assertEquals(vc.nextBugfixSnapshot, null) - assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT")) - - // This should contain the same list sans the current version - List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1) - assertTrue(indexCompatList.containsAll(vc.indexCompatible)) - assertTrue(vc.indexCompatible.containsAll(indexCompatList)) - - List wireCompatList = [Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"), - Version.fromString("6.0.1"), Version.fromString("6.1.0")] - assertTrue(wireCompatList.containsAll(vc.wireCompatible)) - assertTrue(vc.wireCompatible.containsAll(wireCompatList)) - - assertEquals(vc.snapshotsIndexCompatible.size(), 1) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("5.2.1-SNAPSHOT"))) - - assertEquals(vc.snapshotsWireCompatible.size(), 1) - assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("5.2.1-SNAPSHOT"))) - } - - /** - * This validates the lower bound of wire compat, which is 5.0. It also validates that the span of 2.x to 5.x if it is decided to port - * this fix all the way to the maint 5.6 release. 
- */ - @Test - void testFloorOfWireCompatVersions() { - List localVersion = [formatVersion('2.0.0'), formatVersion('2.0.1'), formatVersion('2.1.0'), formatVersion('2.1.1'), - formatVersion('5.0.0'), formatVersion('5.0.1'), formatVersion('5.1.0'), formatVersion('5.1.1'), - formatVersion('5.2.0'),formatVersion('5.2.1'),formatVersion('5.3.0'),formatVersion('5.3.1'), - formatVersion('5.3.2')] - VersionCollection vc = new VersionCollection(localVersion) - assertNotNull(vc) - assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("2.1.1-SNAPSHOT")) - - // This should contain the same list sans the current version - List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1) - assertTrue(indexCompatList.containsAll(vc.indexCompatible)) - assertTrue(vc.indexCompatible.containsAll(indexCompatList)) - - List wireCompatList = [Version.fromString("2.1.0"), Version.fromString("2.1.1-SNAPSHOT"), Version.fromString("5.0.0"), - Version.fromString("5.0.1"), Version.fromString("5.1.0"), - Version.fromString("5.1.1"), Version.fromString("5.2.0"), Version.fromString("5.2.1"), - Version.fromString("5.3.0"), Version.fromString("5.3.1")] - - List compatible = vc.wireCompatible - assertTrue(wireCompatList.containsAll(compatible)) - assertTrue(vc.wireCompatible.containsAll(wireCompatList)) - - assertEquals(vc.snapshotsIndexCompatible.size(), 1) - assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("2.1.1-SNAPSHOT"))) - - // ensure none of the 2.x snapshots appear here, as this is the floor of bwc for wire compat - assertEquals(vc.snapshotsWireCompatible.size(), 0) - } -} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/VersionCollectionTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/VersionCollectionTests.java new file mode 100644 index 00000000000..5e83462cf5c --- /dev/null +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/VersionCollectionTests.java @@ -0,0 +1,406 @@ +package org.elasticsearch.gradle; + +import 
org.elasticsearch.gradle.test.GradleUnitTestCase; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +public class VersionCollectionTests extends GradleUnitTestCase { + + private static final Map> sampleVersions = new HashMap<>(); + + @Rule + public ExpectedException expectedEx = ExpectedException.none(); + + static { + // unreleased major and two unreleased minors ( minor in feature freeze ) + sampleVersions.put("8.0.0", asList( + "7_0_0", "7_0_1", "7_1_0", "7_1_1", "7_2_0", "7_3_0", "8.0.0" + )); + sampleVersions.put("7.0.0-alpha1", asList( + "6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", + "6_0_0", "6_0_1", "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", + "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4", + "6_3_0", "6_3_1", "6_3_2", + "6_4_0", "6_4_1", "6_4_2", + "6_5_0", "7_0_0_alpha1" + )); + sampleVersions.put("6.5.0", asList( + "5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1", + "5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0", "5_3_1", "5_3_2", "5_3_3", + "5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3", "5_6_0", "5_6_1", "5_6_2", "5_6_3", + "5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10", "5_6_11", "5_6_12", "5_6_13", + "6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", "6_0_0", "6_0_1", + "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4", "6_3_0", "6_3_1", + "6_3_2", "6_4_0", "6_4_1", "6_4_2", "6_5_0" + )); + sampleVersions.put("6.6.0", asList( + "5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1", + "5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0", "5_3_1", "5_3_2", "5_3_3", + "5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3", "5_6_0", "5_6_1", "5_6_2", "5_6_3", + "5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10", "5_6_11", "5_6_12", "5_6_13", + "6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", 
"6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", "6_0_0", "6_0_1", + "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4", "6_3_0", "6_3_1", + "6_3_2", "6_4_0", "6_4_1", "6_4_2", "6_5_0", "6_6_0" + )); + sampleVersions.put("6.4.2", asList( + "5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1", + "5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0", + "5_3_1", "5_3_2", "5_3_3", "5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3", + "5_6_0", "5_6_1", "5_6_2", "5_6_3", "5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10", + "5_6_11", "5_6_12", "5_6_13", + "6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", + "6_0_0", "6_0_1", "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", + "6_2_4", "6_3_0", "6_3_1", "6_3_2", "6_4_0", "6_4_1", "6_4_2" + )); + } + + @Test(expected = IllegalArgumentException.class) + public void testExceptionOnEmpty() { + new VersionCollection(asList("foo", "bar"), Version.fromString("7.0.0")); + } + + @Test(expected = IllegalStateException.class) + public void testExceptionOnNonCurrent() { + new VersionCollection(singletonList(formatVersionToLine("6.5.0")), Version.fromString("7.0.0")); + } + + @Test(expected = IllegalStateException.class) + public void testExceptionOnTooManyMajors() { + new VersionCollection( + asList( + formatVersionToLine("5.6.12"), + formatVersionToLine("6.5.0"), + formatVersionToLine("7.0.0") + ), + Version.fromString("7.0.0") + ); + } + + public void testWireCompatible() { + assertVersionsEquals( + singletonList("6.5.0-SNAPSHOT"), + getVersionCollection("7.0.0-alpha1").getWireCompatible() + ); + assertVersionsEquals( + asList( + "5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", + "5.6.11", "5.6.12", "5.6.13-SNAPSHOT", + "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", 
"6.1.4", + "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", + "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT" + ), + getVersionCollection("6.5.0").getWireCompatible() + ); + + assertVersionsEquals( + asList( + "5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", + "5.6.11", "5.6.12", "5.6.13-SNAPSHOT", "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", + "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1" + ), + getVersionCollection("6.4.2").getWireCompatible() + ); + + assertVersionsEquals( + asList( + "5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", + "5.6.11", "5.6.12", "5.6.13-SNAPSHOT", + "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", + "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", + "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT" + ), + getVersionCollection("6.6.0").getWireCompatible() + ); + + assertVersionsEquals( + singletonList("7.3.0"), + getVersionCollection("8.0.0").getWireCompatible() + ); + } + + public void testWireCompatibleUnreleased() { + assertVersionsEquals( + singletonList("6.5.0-SNAPSHOT"), + getVersionCollection("7.0.0-alpha1").getUnreleasedWireCompatible() + ); + assertVersionsEquals( + asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT"), + getVersionCollection("6.5.0").getUnreleasedWireCompatible() + ); + + assertVersionsEquals( + singletonList("5.6.13-SNAPSHOT"), + getVersionCollection("6.4.2").getUnreleasedWireCompatible() + ); + + assertVersionsEquals( + asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"), + getVersionCollection("6.6.0").getUnreleasedWireCompatible() + ); + + assertVersionsEquals( + singletonList("7.3.0"), + getVersionCollection("8.0.0").getUnreleasedWireCompatible() + ); + } + + public void testIndexCompatible() { + assertVersionsEquals( + asList( + "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", + 
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", "6.3.0", "6.3.1", + "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT" + ), + getVersionCollection("7.0.0-alpha1").getIndexCompatible() + ); + + assertVersionsEquals( + asList( + "5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3", + "5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3", + "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13-SNAPSHOT", + "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", + "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT" + ), + getVersionCollection("6.5.0").getIndexCompatible() + ); + + assertVersionsEquals( + asList( + "5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3", + "5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3", + "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13-SNAPSHOT", + "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", + "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1" + ), + getVersionCollection("6.4.2").getIndexCompatible() + ); + + assertVersionsEquals( + asList( + "5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3", + "5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3", + "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13-SNAPSHOT", + "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", + "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT" + ), + getVersionCollection("6.6.0").getIndexCompatible() + ); + + 
assertVersionsEquals( + asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0"), + getVersionCollection("8.0.0").getIndexCompatible() + ); + } + + public void testIndexCompatibleUnreleased() { + assertVersionsEquals( + asList("6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"), + getVersionCollection("7.0.0-alpha1").getUnreleasedIndexCompatible() + ); + + assertVersionsEquals( + asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT"), + getVersionCollection("6.5.0").getUnreleasedIndexCompatible() + ); + + assertVersionsEquals( + singletonList("5.6.13-SNAPSHOT"), + getVersionCollection("6.4.2").getUnreleasedIndexCompatible() + ); + + assertVersionsEquals( + asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"), + getVersionCollection("6.6.0").getUnreleasedIndexCompatible() + ); + + assertVersionsEquals( + asList("7.1.1", "7.2.0", "7.3.0"), + getVersionCollection("8.0.0").getUnreleasedIndexCompatible() + ); + } + + public void testGetUnreleased() { + assertVersionsEquals( + asList("6.4.2", "6.5.0", "7.0.0-alpha1"), + getVersionCollection("7.0.0-alpha1").getUnreleased() + ); + assertVersionsEquals( + asList("5.6.13", "6.4.2", "6.5.0"), + getVersionCollection("6.5.0").getUnreleased() + ); + assertVersionsEquals( + asList("5.6.13", "6.4.2"), + getVersionCollection("6.4.2").getUnreleased() + ); + assertVersionsEquals( + asList("5.6.13", "6.4.2", "6.5.0", "6.6.0"), + getVersionCollection("6.6.0").getUnreleased() + ); + assertVersionsEquals( + asList("7.1.1", "7.2.0", "7.3.0", "8.0.0"), + getVersionCollection("8.0.0").getUnreleased() + ); + } + + public void testGetBranch() { + assertUnreleasedBranchNames( + asList("6.4", "6.x"), + getVersionCollection("7.0.0-alpha1") + ); + assertUnreleasedBranchNames( + asList("5.6", "6.4"), + getVersionCollection("6.5.0") + ); + assertUnreleasedBranchNames( + singletonList("5.6"), + getVersionCollection("6.4.2") + ); + assertUnreleasedBranchNames( + asList("5.6", "6.4", "6.5"), + getVersionCollection("6.6.0") + ); + assertUnreleasedBranchNames( + 
asList("7.1", "7.2", "7.x"), + getVersionCollection("8.0.0") + ); + } + + public void testGetGradleProjectName() { + assertUnreleasedGradleProjectNames( + asList("bugfix", "minor"), + getVersionCollection("7.0.0-alpha1") + ); + assertUnreleasedGradleProjectNames( + asList("maintenance", "bugfix"), + getVersionCollection("6.5.0") + ); + assertUnreleasedGradleProjectNames( + singletonList("maintenance"), + getVersionCollection("6.4.2") + ); + assertUnreleasedGradleProjectNames( + asList("maintenance", "bugfix", "staged"), + getVersionCollection("6.6.0") + ); + assertUnreleasedGradleProjectNames( + asList("bugfix", "staged", "minor"), + getVersionCollection("8.0.0") + ); + } + + public void testCompareToAuthoritative() { + List listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0"); + List authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1", "7.1.0") + .map(Version::fromString) + .collect(Collectors.toList()); + + VersionCollection vc = new VersionCollection( + listOfVersions.stream() + .map(this::formatVersionToLine) + .collect(Collectors.toList()), + Version.fromString("8.0.0") + ); + vc.compareToAuthoritative(authoritativeReleasedVersions); + } + + public void testCompareToAuthoritativeUnreleasedActuallyReleased() { + List listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0"); + List authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1", "7.1.0", "7.1.1", "8.0.0") + .map(Version::fromString) + .collect(Collectors.toList()); + + VersionCollection vc = new VersionCollection( + listOfVersions.stream() + .map(this::formatVersionToLine) + .collect(Collectors.toList()), + Version.fromString("8.0.0") + ); + expectedEx.expect(IllegalStateException.class); + expectedEx.expectMessage("but they are released"); + vc.compareToAuthoritative(authoritativeReleasedVersions); + } + + public void testCompareToAuthoritativeNotReallyRelesed() { + List listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", 
"7.2.0", "7.3.0", "8.0.0"); + List authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1") + .map(Version::fromString) + .collect(Collectors.toList()); + VersionCollection vc = new VersionCollection( + listOfVersions.stream() + .map(this::formatVersionToLine) + .collect(Collectors.toList()), + Version.fromString("8.0.0") + ); + expectedEx.expect(IllegalStateException.class); + expectedEx.expectMessage("not really released"); + vc.compareToAuthoritative(authoritativeReleasedVersions); + } + + private void assertUnreleasedGradleProjectNames(List expectedNAmes, VersionCollection versionCollection) { + List actualNames = new ArrayList<>(); + versionCollection.forPreviousUnreleased(unreleasedVersion -> + actualNames.add(unreleasedVersion.gradleProjectName) + ); + assertEquals(expectedNAmes, actualNames); + } + + private void assertUnreleasedBranchNames(List expectedBranches, VersionCollection versionCollection) { + List actualBranches = new ArrayList<>(); + versionCollection.forPreviousUnreleased(unreleasedVersionInfo -> + actualBranches.add(unreleasedVersionInfo.branch) + ); + assertEquals(expectedBranches, actualBranches); + } + + private String formatVersionToLine(final String version) { + return " public static final Version V_" + version.replaceAll("\\.", "_") + " "; + } + + private void assertVersionsEquals(List expected, List actual) { + assertEquals( + expected.stream() + .map(Version::fromString) + .collect(Collectors.toList()), + actual + ); + } + + private VersionCollection getVersionCollection(String currentVersion) { + return new VersionCollection( + sampleVersions.get(currentVersion).stream() + .map(this::formatVersionToLine) + .collect(Collectors.toList()), + Version.fromString(currentVersion) + ); + } +} diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/VersionTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/VersionTests.java index d3c3b4a43cb..fb4e9d70e3a 100644 --- 
a/buildSrc/src/test/java/org/elasticsearch/gradle/VersionTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/VersionTests.java @@ -44,16 +44,12 @@ public class VersionTests extends GradleUnitTestCase { assertTrue("1.10.20 is not interpreted as before 2.0.0", Version.fromString("1.10.20").before("2.0.0") ); - assertTrue("7.0.0-alpha1 is not interpreted as before 7.0.0-alpha2", - Version.fromString("7.0.0-alpha1").before("7.0.0-alpha2") - ); assertTrue("7.0.0-alpha1 should be equal to 7.0.0-alpha1", Version.fromString("7.0.0-alpha1").equals(Version.fromString("7.0.0-alpha1")) ); assertTrue("7.0.0-SNAPSHOT should be equal to 7.0.0-SNAPSHOT", Version.fromString("7.0.0-SNAPSHOT").equals(Version.fromString("7.0.0-SNAPSHOT")) ); - assertEquals(Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("5.2.1-SNAPSHOT")); } public void testCollections() { @@ -89,51 +85,10 @@ public class VersionTests extends GradleUnitTestCase { new Version(7, 0, 0, "", true) )); - // snapshot is not taken into account TODO inconsistent with equals assertEquals( 0, - new Version(7, 0, 0, "", false).compareTo( - new Version(7, 0, 0, null, true)) - ); - // without sufix is smaller than with TODO - assertOrder( - new Version(7, 0, 0, null, false), - new Version(7, 0, 0, "-alpha1", false) - ); - // numbered sufix - assertOrder( - new Version(7, 0, 0, "-alpha1", false), - new Version(7, 0, 0, "-alpha2", false) - ); - // ranked sufix - assertOrder( - new Version(7, 0, 0, "-alpha8", false), - new Version(7, 0, 0, "-rc1", false) - ); - // ranked sufix - assertOrder( - new Version(7, 0, 0, "-alpha8", false), - new Version(7, 0, 0, "-beta1", false) - ); - // ranked sufix - assertOrder( - new Version(7, 0, 0, "-beta8", false), - new Version(7, 0, 0, "-rc1", false) - ); - // major takes precedence - assertOrder( - new Version(6, 10, 10, "-alpha8", true), - new Version(7, 0, 0, "-alpha2", false) - ); - // then minor - assertOrder( - new Version(7, 0, 10, "-alpha8", true), - new Version(7, 1, 
0, "-alpha2", false) - ); - // then revision - assertOrder( - new Version(7, 1, 0, "-alpha8", true), - new Version(7, 1, 10, "-alpha2", false) + new Version(7, 0, 0, "-alpha1", false).compareTo( + new Version(7, 0, 0, "", true)) ); } @@ -149,18 +104,6 @@ public class VersionTests extends GradleUnitTestCase { Version.fromString("foo.bar.baz"); } - public void testExceptionSuffixNumber() { - expectedEx.expect(IllegalArgumentException.class); - expectedEx.expectMessage("Invalid suffix"); - new Version(7, 1, 1, "-alpha", true); - } - - public void testExceptionSuffix() { - expectedEx.expect(IllegalArgumentException.class); - expectedEx.expectMessage("Suffix must contain one of:"); - new Version(7, 1, 1, "foo1", true); - } - private void assertOrder(Version smaller, Version bigger) { assertEquals(smaller + " should be smaller than " + bigger, -1, smaller.compareTo(bigger)); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 832aba51e2b..053f46f8496 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -522,6 +522,9 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { IndicesAliasesRequest aliasesAddRequest = new IndicesAliasesRequest(); AliasActions addAction = new AliasActions(AliasActions.Type.ADD).index(index).aliases(alias); + if (randomBoolean()) { + addAction.writeIndex(randomBoolean()); + } addAction.routing("routing").searchRouting("search_routing").filter("{\"term\":{\"year\":2016}}"); aliasesAddRequest.addAliasAction(addAction); AcknowledgedResponse aliasesAddResponse = execute(aliasesAddRequest, highLevelClient().indices()::updateAliases, @@ -535,6 +538,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { Map filter = (Map) getAlias.get("filter"); Map 
term = (Map) filter.get("term"); assertEquals(2016, term.get("year")); + Boolean isWriteIndex = (Boolean) getAlias.get("is_write_index"); + assertThat(isWriteIndex, equalTo(addAction.writeIndex())); String alias2 = "alias2"; IndicesAliasesRequest aliasesAddRemoveRequest = new IndicesAliasesRequest(); diff --git a/distribution/bwc/maintenance-bugfix-snapshot/build.gradle b/distribution/bwc/bugfix/build.gradle similarity index 100% rename from distribution/bwc/maintenance-bugfix-snapshot/build.gradle rename to distribution/bwc/bugfix/build.gradle diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index a44e670542b..43ebe53e03a 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -17,236 +17,225 @@ * under the License. */ - - import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionCollection import java.nio.charset.StandardCharsets import static org.elasticsearch.gradle.BuildPlugin.getJavaHome + /** - * This is a dummy project which does a local checkout of the previous - * wire compat version's branch, and builds a snapshot. This allows backcompat - * tests to test against the next unreleased version, closest to this version, - * without relying on snapshots. + * We want to be able to do BWC tests for unreleased versions without relying on and waiting for snapshots. + * For this we need to check out and build the unreleased versions. + * Since These depend on the current version, we can't name the Gradle projects statically, and don't know what the + * unreleased versions are when Gradle projects are set up, so we use "build-unreleased-version-*" as placeholders + * and configure them to build various versions here. 
*/ -subprojects { +bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unreleasedVersion -> project("${project.path}:${unreleasedVersion.gradleProjectName}") { + Version bwcVersion = unreleasedVersion.version + String bwcBranch = unreleasedVersion.branch + apply plugin: 'distribution' + // Not published so no need to assemble + assemble.enabled = false + assemble.dependsOn.remove('buildBwcVersion') - Version bwcVersion = bwcVersions.getSnapshotForProject(project.name) - if (bwcVersion == null) { - // this project wont do anything - return - } + File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}") - String bwcBranch - if (project.name == 'next-minor-snapshot') { - // this is always a .x series - bwcBranch = "${bwcVersion.major}.x" - } else { - bwcBranch = "${bwcVersion.major}.${bwcVersion.minor}" - } + final String remote = System.getProperty("tests.bwc.remote", "elastic") - apply plugin: 'distribution' - // Not published so no need to assemble - assemble.enabled = false - assemble.dependsOn.remove('buildBwcVersion') - - File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}") - - final String remote = System.getProperty("tests.bwc.remote", "elastic") - - final boolean gitFetchLatest - final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true") - if ("true".equals(gitFetchLatestProperty)) { - gitFetchLatest = true - } else if ("false".equals(gitFetchLatestProperty)) { - gitFetchLatest = false - } else { - throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]") - } - - task createClone(type: LoggedExec) { - onlyIf { checkoutDir.exists() == false } - commandLine = ['git', 'clone', rootDir, checkoutDir] - } - - task findRemote(type: LoggedExec) { - dependsOn createClone - workingDir = checkoutDir - commandLine = ['git', 'remote', '-v'] - ByteArrayOutputStream output = new ByteArrayOutputStream() - standardOutput = output - 
doLast { - project.ext.remoteExists = false - output.toString('UTF-8').eachLine { - if (it.contains("${remote}\t")) { - project.ext.remoteExists = true - } - } - } - } - - task addRemote(type: LoggedExec) { - dependsOn findRemote - onlyIf { project.ext.remoteExists == false } - workingDir = checkoutDir - commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"] - } - - task fetchLatest(type: LoggedExec) { - onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest } - dependsOn addRemote - workingDir = checkoutDir - commandLine = ['git', 'fetch', '--all'] - } - - String buildMetadataKey = "bwc_refspec_${project.path.substring(1)}" - task checkoutBwcBranch(type: LoggedExec) { - String refspec = System.getProperty("tests.bwc.refspec.${bwcBranch}", buildMetadata.get(buildMetadataKey, "${remote}/${bwcBranch}")) - dependsOn fetchLatest - workingDir = checkoutDir - commandLine = ['git', 'checkout', refspec] - doFirst { - println "Checking out elasticsearch ${refspec} for branch ${bwcBranch}" - } - } - - File buildMetadataFile = project.file("build/${project.name}/build_metadata") - task writeBuildMetadata(type: LoggedExec) { - dependsOn checkoutBwcBranch - workingDir = checkoutDir - commandLine = ['git', 'rev-parse', 'HEAD'] - ignoreExitValue = true - ByteArrayOutputStream output = new ByteArrayOutputStream() - standardOutput = output - doLast { - if (execResult.exitValue != 0) { - output.toString('UTF-8').eachLine { line -> logger.error(line) } - execResult.assertNormalExitValue() - } - project.mkdir(buildMetadataFile.parent) - String commit = output.toString('UTF-8') - buildMetadataFile.setText("${buildMetadataKey}=${commit}", 'UTF-8') - println "Checked out elasticsearch commit ${commit}" - } - } - - List artifactFiles = [] - List projectDirs = [] - for (String project : ['zip', 'deb', 'rpm']) { - String baseDir = "distribution" - if (bwcVersion.onOrAfter('6.3.0')) { - baseDir += project == 'zip' ? 
'/archives' : '/packages' - // add oss variant first - projectDirs.add("${baseDir}/oss-${project}") - artifactFiles.add(file("${checkoutDir}/${baseDir}/oss-${project}/build/distributions/elasticsearch-oss-${bwcVersion}.${project}")) - } - projectDirs.add("${baseDir}/${project}") - artifactFiles.add(file("${checkoutDir}/${baseDir}/${project}/build/distributions/elasticsearch-${bwcVersion}.${project}")) - } - - task buildBwcVersion(type: Exec) { - dependsOn checkoutBwcBranch, writeBuildMetadata - workingDir = checkoutDir - doFirst { - // Execution time so that the checkouts are available - List lines = file("$checkoutDir/.ci/java-versions.properties").readLines() - environment( - 'JAVA_HOME', - getJavaHome(it, Integer.parseInt( - lines - .findAll({ it.startsWith("ES_BUILD_JAVA=java") }) - .collect({ it.replace("ES_BUILD_JAVA=java", "").trim() }) - .join("!!") - )) - ) - environment( - 'RUNTIME_JAVA_HOME', - getJavaHome(it, Integer.parseInt( - lines - .findAll({ it.startsWith("ES_RUNTIME_JAVA=java") }) - .collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() }) - .join("!!") - )) - ) - } - - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - executable 'cmd' - args '/C', 'call', new File(checkoutDir, 'gradlew').toString() + final boolean gitFetchLatest + final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true") + if ("true".equals(gitFetchLatestProperty)) { + gitFetchLatest = true + } else if ("false".equals(gitFetchLatestProperty)) { + gitFetchLatest = false } else { - executable new File(checkoutDir, 'gradlew').toString() + throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + gitFetchLatestProperty + "]") } - if (gradle.startParameter.isOffline()) { - args "--offline" - } - for (String dir : projectDirs) { - args ":${dir.replace('/', ':')}:assemble" - } - args "-Dbuild.snapshot=true" - final LogLevel logLevel = gradle.startParameter.logLevel - if ([LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, 
LogLevel.DEBUG].contains(logLevel)) { - args "--${logLevel.name().toLowerCase(Locale.ENGLISH)}" - } - final String showStacktraceName = gradle.startParameter.showStacktrace.name() - assert ["INTERNAL_EXCEPTIONS", "ALWAYS", "ALWAYS_FULL"].contains(showStacktraceName) - if (showStacktraceName.equals("ALWAYS")) { - args "--stacktrace" - } else if (showStacktraceName.equals("ALWAYS_FULL")) { - args "--full-stacktrace" - } - standardOutput = new IndentingOutputStream(System.out) - errorOutput = new IndentingOutputStream(System.err) - doLast { - List missing = artifactFiles.grep { file -> - false == file.exists() - } - if (false == missing.empty) { - throw new InvalidUserDataException( - "Building bwc version didn't generate expected files ${missing}") - } - } - } - if (gradle.startParameter.taskNames == ["assemble"]) { - // Gradle needs the `artifacts` declaration, including `builtBy` bellow to make projects dependencies on this - // project work, but it will also trigger the build of these for the `assemble` task. - // Since these are only used for testing, we don't want to assemble them if `assemble` is the single command being - // ran. - logger.info("Skipping BWC builds since `assemble` is the only task name provided on the command line") - } else { - artifacts { - for (File artifactFile : artifactFiles) { - String artifactName = artifactFile.name.contains('oss') ? 
'elasticsearch-oss' : 'elasticsearch' - String suffix = artifactFile.toString()[-3..-1] - 'default' file: artifactFile, name: artifactName, type: suffix, builtBy: buildBwcVersion - } + task createClone(type: LoggedExec) { + onlyIf { checkoutDir.exists() == false } + commandLine = ['git', 'clone', rootDir, checkoutDir] } - } -} + + task findRemote(type: LoggedExec) { + dependsOn createClone + workingDir = checkoutDir + commandLine = ['git', 'remote', '-v'] + ByteArrayOutputStream output = new ByteArrayOutputStream() + standardOutput = output + doLast { + project.ext.remoteExists = false + output.toString('UTF-8').eachLine { + if (it.contains("${remote}\t")) { + project.ext.remoteExists = true + } + } + } + } + + task addRemote(type: LoggedExec) { + dependsOn findRemote + onlyIf { project.ext.remoteExists == false } + workingDir = checkoutDir + commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"] + } + + task fetchLatest(type: LoggedExec) { + onlyIf { project.gradle.startParameter.isOffline() == false && gitFetchLatest } + dependsOn addRemote + workingDir = checkoutDir + commandLine = ['git', 'fetch', '--all'] + } + + String buildMetadataKey = "bwc_refspec_${project.path.substring(1)}" + task checkoutBwcBranch(type: LoggedExec) { + String refspec = System.getProperty("tests.bwc.refspec.${bwcBranch}", buildMetadata.get(buildMetadataKey, "${remote}/${bwcBranch}")) + dependsOn fetchLatest + workingDir = checkoutDir + commandLine = ['git', 'checkout', refspec] + doFirst { + println "Checking out elasticsearch ${refspec} for branch ${bwcBranch}" + } + } + + File buildMetadataFile = project.file("build/${project.name}/build_metadata") + task writeBuildMetadata(type: LoggedExec) { + dependsOn checkoutBwcBranch + workingDir = checkoutDir + commandLine = ['git', 'rev-parse', 'HEAD'] + ignoreExitValue = true + ByteArrayOutputStream output = new ByteArrayOutputStream() + standardOutput = output + doLast { + if 
(execResult.exitValue != 0) { + output.toString('UTF-8').eachLine { line -> logger.error(line) } + execResult.assertNormalExitValue() + } + project.mkdir(buildMetadataFile.parent) + String commit = output.toString('UTF-8') + buildMetadataFile.setText("${buildMetadataKey}=${commit}", 'UTF-8') + println "Checked out elasticsearch commit ${commit}" + } + } + + List artifactFiles = [] + List projectDirs = [] + for (String project : ['zip', 'deb', 'rpm']) { + String baseDir = "distribution" + if (bwcVersion.onOrAfter('6.3.0')) { + baseDir += project == 'zip' ? '/archives' : '/packages' + // add oss variant first + projectDirs.add("${baseDir}/oss-${project}") + artifactFiles.add(file("${checkoutDir}/${baseDir}/oss-${project}/build/distributions/elasticsearch-oss-${bwcVersion}.${project}")) + } + projectDirs.add("${baseDir}/${project}") + artifactFiles.add(file("${checkoutDir}/${baseDir}/${project}/build/distributions/elasticsearch-${bwcVersion}.${project}")) + } + + task buildBwcVersion(type: Exec) { + dependsOn checkoutBwcBranch, writeBuildMetadata + workingDir = checkoutDir + doFirst { + // Execution time so that the checkouts are available + List lines = file("${checkoutDir}/.ci/java-versions.properties").readLines() + environment( + 'JAVA_HOME', + getJavaHome(it, Integer.parseInt( + lines + .findAll({ it.startsWith("ES_BUILD_JAVA=java") }) + .collect({ it.replace("ES_BUILD_JAVA=java", "").trim() }) + .join("!!") + )) + ) + environment( + 'RUNTIME_JAVA_HOME', + getJavaHome(it, Integer.parseInt( + lines + .findAll({ it.startsWith("ES_RUNTIME_JAVA=java") }) + .collect({ it.replace("ES_RUNTIME_JAVA=java", "").trim() }) + .join("!!") + )) + ) + } + + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + executable 'cmd' + args '/C', 'call', new File(checkoutDir, 'gradlew').toString() + } else { + executable new File(checkoutDir, 'gradlew').toString() + } + if (gradle.startParameter.isOffline()) { + args "--offline" + } + for (String dir : projectDirs) { + args ":${dir.replace('/', 
':')}:assemble" + } + args "-Dbuild.snapshot=true" + final LogLevel logLevel = gradle.startParameter.logLevel + if ([LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG].contains(logLevel)) { + args "--${logLevel.name().toLowerCase(Locale.ENGLISH)}" + } + final String showStacktraceName = gradle.startParameter.showStacktrace.name() + assert ["INTERNAL_EXCEPTIONS", "ALWAYS", "ALWAYS_FULL"].contains(showStacktraceName) + if (showStacktraceName.equals("ALWAYS")) { + args "--stacktrace" + } else if (showStacktraceName.equals("ALWAYS_FULL")) { + args "--full-stacktrace" + } + standardOutput = new IndentingOutputStream(System.out, bwcVersion) + errorOutput = new IndentingOutputStream(System.err, bwcVersion) + doLast { + List missing = artifactFiles.grep { file -> + false == file.exists() + } + if (false == missing.empty) { + throw new InvalidUserDataException( + "Building ${bwcVersion} didn't generate expected files ${missing}") + } + } + } + + if (gradle.startParameter.taskNames == ["assemble"]) { + // Gradle needs the `artifacts` declaration, including `builtBy` bellow to make projects dependencies on this + // project work, but it will also trigger the build of these for the `assemble` task. + // Since these are only used for testing, we don't want to assemble them if `assemble` is the single command being + // ran. + logger.info("Skipping BWC builds since `assemble` is the only task name provided on the command line") + } else { + artifacts { + for (File artifactFile : artifactFiles) { + String artifactName = artifactFile.name.contains('oss') ? 
'elasticsearch-oss' : 'elasticsearch' + String suffix = artifactFile.toString()[-3..-1] + 'default' file: artifactFile, name: artifactName, type: suffix, builtBy: buildBwcVersion + } + } + } +}} class IndentingOutputStream extends OutputStream { - public static final byte[] INDENT = " [bwc] ".getBytes(StandardCharsets.UTF_8) - private final OutputStream delegate + public final byte[] indent + private final OutputStream delegate - public IndentingOutputStream(OutputStream delegate) { - this.delegate = delegate - } - - @Override - public void write(int b) { - write([b] as int[], 0, 1) - } - - public void write(int[] bytes, int offset, int length) { - for (int i = 0; i < bytes.length; i++) { - delegate.write(bytes[i]) - if (bytes[i] == '\n') { - delegate.write(INDENT) - } + public IndentingOutputStream(OutputStream delegate, Object version) { + this.delegate = delegate + indent = " [${version}] ".getBytes(StandardCharsets.UTF_8) } - } -} + + @Override + public void write(int b) { + write([b] as int[], 0, 1) + } + + public void write(int[] bytes, int offset, int length) { + for (int i = 0; i < bytes.length; i++) { + delegate.write(bytes[i]) + if (bytes[i] == '\n') { + delegate.write(indent) + } + } + } +} \ No newline at end of file diff --git a/distribution/bwc/next-bugfix-snapshot/build.gradle b/distribution/bwc/maintenance/build.gradle similarity index 100% rename from distribution/bwc/next-bugfix-snapshot/build.gradle rename to distribution/bwc/maintenance/build.gradle diff --git a/distribution/bwc/next-minor-snapshot/build.gradle b/distribution/bwc/minor/build.gradle similarity index 100% rename from distribution/bwc/next-minor-snapshot/build.gradle rename to distribution/bwc/minor/build.gradle diff --git a/distribution/bwc/staged-minor-snapshot/build.gradle b/distribution/bwc/staged/build.gradle similarity index 100% rename from distribution/bwc/staged-minor-snapshot/build.gradle rename to distribution/bwc/staged/build.gradle diff --git 
a/distribution/src/bin/elasticsearch-service-mgr.exe b/distribution/src/bin/elasticsearch-service-mgr.exe index 730240403a7..e5d4b55d916 100644 Binary files a/distribution/src/bin/elasticsearch-service-mgr.exe and b/distribution/src/bin/elasticsearch-service-mgr.exe differ diff --git a/distribution/src/bin/elasticsearch-service-x64.exe b/distribution/src/bin/elasticsearch-service-x64.exe index dab7def7558..acd94f25076 100644 Binary files a/distribution/src/bin/elasticsearch-service-x64.exe and b/distribution/src/bin/elasticsearch-service-x64.exe differ diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index 35265140533..ef5744f7dff 100644 --- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -38,7 +38,7 @@ normalization can be specified with the `name` parameter, which accepts `nfc`, convert `nfc` to `nfd` or `nfkc` to `nfkd` respectively: Which letters are normalized can be controlled by specifying the -`unicodeSetFilter` parameter, which accepts a +`unicode_set_filter` parameter, which accepts a http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet]. Here are two examples, the default usage and a customised character filter: @@ -194,7 +194,7 @@ with the `name` parameter, which accepts `nfc`, `nfkc`, and `nfkc_cf` (default). Which letters are normalized can be controlled by specifying the -`unicodeSetFilter` parameter, which accepts a +`unicode_set_filter` parameter, which accepts a http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet]. You should probably prefer the <>. @@ -273,7 +273,7 @@ The ICU folding token filter already does Unicode normalization, so there is no need to use Normalize character or token filter as well. 
Which letters are folded can be controlled by specifying the -`unicodeSetFilter` parameter, which accepts a +`unicode_set_filter` parameter, which accepts a http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet]. The following example exempts Swedish characters from folding. It is important @@ -300,7 +300,7 @@ PUT icu_sample "filter": { "swedish_folding": { "type": "icu_folding", - "unicodeSetFilter": "[^åäöÅÄÖ]" + "unicode_set_filter": "[^åäöÅÄÖ]" } } } diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc index 9a4e90bebd7..d4441708018 100644 --- a/docs/plugins/repository.asciidoc +++ b/docs/plugins/repository.asciidoc @@ -32,7 +32,7 @@ The GCS repository plugin adds support for using Google Cloud Storage service as The following plugin has been contributed by our community: -* https://github.com/wikimedia/search-repository-swift[Openstack Swift] (by Wikimedia Foundation) +* https://github.com/BigDataBoutique/elasticsearch-repository-swift[Openstack Swift] (by Wikimedia Foundation and BigData Boutique) include::repository-azure.asciidoc[] diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc index 6793171d011..39ac9b134e3 100644 --- a/docs/reference/docs/delete-by-query.asciidoc +++ b/docs/reference/docs/delete-by-query.asciidoc @@ -277,7 +277,7 @@ The number of requests per second effectively executed during the delete by quer `throttled_until_millis`:: -This field should always be equal to zero in a delete by query response. It only +This field should always be equal to zero in a `_delete_by_query` response. It only has meaning when using the <>, where it indicates the next time (in milliseconds since epoch) a throttled request will be executed again in order to conform to `requests_per_second`. 
diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index bf8a2057b39..1fd49307701 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -671,7 +671,7 @@ The number of requests per second effectively executed during the reindex. `throttled_until_millis`:: -This field should always be equal to zero in a `_delete_by_query` response. It only +This field should always be equal to zero in a `_reindex` response. It only has meaning when using the <>, where it indicates the next time (in milliseconds since epoch) a throttled request will be executed again in order to conform to `requests_per_second`. diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index 6326ee44bb2..fab21aa570f 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -237,7 +237,7 @@ batch size is `1000`, so if the `requests_per_second` is set to `500`: [source,txt] -------------------------------------------------- target_time = 1000 / 500 per second = 2 seconds -wait_time = target_time - delete_time = 2 seconds - .5 seconds = 1.5 seconds +wait_time = target_time - write_time = 2 seconds - .5 seconds = 1.5 seconds -------------------------------------------------- Since the batch is issued as a single `_bulk` request large batch sizes will @@ -332,7 +332,7 @@ The number of requests per second effectively executed during the update by quer `throttled_until_millis`:: -This field should always be equal to zero in a delete by query response. It only +This field should always be equal to zero in an `_update_by_query` response. It only has meaning when using the <>, where it indicates the next time (in milliseconds since epoch) a throttled request will be executed again in order to conform to `requests_per_second`. 
diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index 71a8e1aa015..34720bb182d 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -102,3 +102,8 @@ status 200 - OK is now returned instead at all times. The Put User API response was changed in 6.5.0 to add the `created` field outside of the user object where it previously had been. In 7.0.0 the user object has been removed in favor of the top level `created` field. + +[float] +==== Source filtering url parameters `_source_include` and `_source_exclude` have been removed + +The deprecated in 6.x url parameters are now removed. Use `_source_includes` and `_source_excludes` instead. diff --git a/docs/reference/modules/indices/query_cache.asciidoc b/docs/reference/modules/indices/query_cache.asciidoc index f6cdf71925a..aaa1ab17428 100644 --- a/docs/reference/modules/indices/query_cache.asciidoc +++ b/docs/reference/modules/indices/query_cache.asciidoc @@ -5,6 +5,7 @@ The query cache is responsible for caching the results of queries. There is one queries cache per node that is shared by all shards. The cache implements an LRU eviction policy: when a cache becomes full, the least recently used data is evicted to make way for new data. +It is not possible to look at the contents being cached. The query cache only caches queries which are being used in a filter context. 
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java index fd80c56cdbe..cd0b09eca8c 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java @@ -31,6 +31,6 @@ public class ExpressionPlugin extends Plugin implements ScriptPlugin { @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new ExpressionScriptEngine(settings); + return new ExpressionScriptEngine(); } } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 150bca60273..e53d0ec3e8c 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -28,8 +28,6 @@ import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource; import org.apache.lucene.search.SortField; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -63,14 +61,10 @@ import java.util.Map; * * Only contexts returning numeric types or {@link Object} are supported. 
*/ -public class ExpressionScriptEngine extends AbstractComponent implements ScriptEngine { +public class ExpressionScriptEngine implements ScriptEngine { public static final String NAME = "expression"; - public ExpressionScriptEngine(Settings settings) { - super(settings); - } - @Override public String getType() { return NAME; diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java index b1872b30f1f..205e638314f 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionFieldScriptTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.script.expression; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; @@ -64,7 +63,7 @@ public class ExpressionFieldScriptTests extends ESTestCase { when(fieldData.getFieldName()).thenReturn("field"); when(fieldData.load(anyObject())).thenReturn(atomicFieldData); - service = new ExpressionScriptEngine(Settings.EMPTY); + service = new ExpressionScriptEngine(); lookup = new SearchLookup(mapperService, ignored -> fieldData, null); } diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java index 301fd2d4db7..e6bd503bfab 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java +++ 
b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.script.expression; import java.io.IOException; import java.text.ParseException; import java.util.Collections; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; @@ -63,7 +62,7 @@ public class ExpressionNumberSortScriptTests extends ESTestCase { when(fieldData.getFieldName()).thenReturn("field"); when(fieldData.load(anyObject())).thenReturn(atomicFieldData); - service = new ExpressionScriptEngine(Settings.EMPTY); + service = new ExpressionScriptEngine(); lookup = new SearchLookup(mapperService, ignored -> fieldData, null); } diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java index c7eae2446a6..137f8e058cd 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionTermsSetQueryTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.script.expression; import java.io.IOException; import java.text.ParseException; import java.util.Collections; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; @@ -63,7 +62,7 @@ public class ExpressionTermsSetQueryTests extends ESTestCase { when(fieldData.getFieldName()).thenReturn("field"); when(fieldData.load(anyObject())).thenReturn(atomicFieldData); - service = new 
ExpressionScriptEngine(Settings.EMPTY); + service = new ExpressionScriptEngine(); lookup = new SearchLookup(mapperService, ignored -> fieldData, null); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index 4e7ffbfb8d0..40062f2cb75 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.painless.Compiler.Loader; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; @@ -54,7 +53,7 @@ import static org.elasticsearch.painless.node.SSource.MainMethodReserved; /** * Implementation of a ScriptEngine for the Painless language. */ -public final class PainlessScriptEngine extends AbstractComponent implements ScriptEngine { +public final class PainlessScriptEngine implements ScriptEngine { /** * Standard name of the Painless language. @@ -90,8 +89,6 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr * @param settings The settings to initialize the engine with. 
*/ public PainlessScriptEngine(Settings settings, Map, List> contexts) { - super(settings); - defaultCompilerSettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(settings)); Map, Compiler> contextsToCompilers = new HashMap<>(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 44823f9aa01..0338e0fba91 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -31,7 +31,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.search.SearchHit; @@ -56,8 +55,8 @@ import java.util.Map; final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { private final HighlightPhase highlightPhase; - PercolatorHighlightSubFetchPhase(Settings settings, Map highlighters) { - this.highlightPhase = new HighlightPhase(settings, highlighters); + PercolatorHighlightSubFetchPhase(Map highlighters) { + this.highlightPhase = new HighlightPhase(highlighters); } boolean hitsExecutionNeeded(SearchContext context) { // for testing diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java index 9c8b6e8c67f..863b46b54ac 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java +++ 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java @@ -20,7 +20,6 @@ package org.elasticsearch.percolator; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; @@ -35,13 +34,6 @@ import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; public class PercolatorPlugin extends Plugin implements MapperPlugin, SearchPlugin { - - private final Settings settings; - - public PercolatorPlugin(Settings settings) { - this.settings = settings; - } - @Override public List> getQueries() { return singletonList(new QuerySpec<>(PercolateQueryBuilder.NAME, PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent)); @@ -51,7 +43,7 @@ public class PercolatorPlugin extends Plugin implements MapperPlugin, SearchPlug public List getFetchSubPhases(FetchPhaseConstructionContext context) { return Arrays.asList( new PercolatorMatchedSlotSubFetchPhase(), - new PercolatorHighlightSubFetchPhase(settings, context.getHighlighters()) + new PercolatorHighlightSubFetchPhase(context.getHighlighters()) ); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java index e5f2160cfca..291a42c1466 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import 
org.elasticsearch.common.lucene.search.function.RandomScoreFunction; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; @@ -47,8 +46,7 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { public void testHitsExecutionNeeded() { PercolateQuery percolateQuery = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); - PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(Settings.EMPTY, - emptyMap()); + PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap()); SearchContext searchContext = Mockito.mock(SearchContext.class); Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList())); Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java index 4784d7f5fe5..19c5739bbc6 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -53,7 +53,7 @@ public class ReindexSourceTargetValidationTests extends ESTestCase { .put(index("baz"), true) .put(index("source", "source_multi"), true) .put(index("source2", "source_multi"), true)).build(); - private static final IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver(Settings.EMPTY); + private static final IndexNameExpressionResolver 
INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver(); private static final AutoCreateIndex AUTO_CREATE_INDEX = new AutoCreateIndex(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), INDEX_NAME_EXPRESSION_RESOLVER); diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java index 158ecff9b2b..a7042b8bfee 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobStore.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -34,7 +33,7 @@ import java.net.URL; /** * Read-only URL-based blob store */ -public class URLBlobStore extends AbstractComponent implements BlobStore { +public class URLBlobStore implements BlobStore { private final URL path; @@ -53,7 +52,6 @@ public class URLBlobStore extends AbstractComponent implements BlobStore { * @param path base URL */ public URLBlobStore(Settings settings, URL path) { - super(settings); this.path = path; this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.uri.buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes(); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java index 7059d93ffc5..1997c589bc3 
100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuFoldingTokenFilterFactory.java @@ -50,7 +50,7 @@ public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory imp public IcuFoldingTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); - this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(ICU_FOLDING_NORMALIZER, settings); + this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(indexSettings, ICU_FOLDING_NORMALIZER, settings); } @Override diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerCharFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerCharFilterFactory.java index e43e163e1a0..86490ff486e 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerCharFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerCharFilterFactory.java @@ -49,7 +49,7 @@ public class IcuNormalizerCharFilterFactory extends AbstractCharFilterFactory im } Normalizer2 normalizer = Normalizer2.getInstance( null, method, "compose".equals(mode) ? 
Normalizer2.Mode.COMPOSE : Normalizer2.Mode.DECOMPOSE); - this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(normalizer, settings); + this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(indexSettings, normalizer, settings); } @Override diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java index 1ef09f86052..d3e59bf9488 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuNormalizerTokenFilterFactory.java @@ -23,7 +23,10 @@ import com.ibm.icu.text.FilteredNormalizer2; import com.ibm.icu.text.Normalizer2; import com.ibm.icu.text.UnicodeSet; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; +import org.elasticsearch.Version; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -35,14 +38,15 @@ import org.elasticsearch.index.IndexSettings; *

The {@code unicodeSetFilter} attribute can be used to provide the UnicodeSet for filtering.

*/ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory implements MultiTermAwareComponent { - + private static final DeprecationLogger deprecationLogger = + new DeprecationLogger(LogManager.getLogger(IcuNormalizerTokenFilterFactory.class)); private final Normalizer2 normalizer; public IcuNormalizerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); String method = settings.get("name", "nfkc_cf"); Normalizer2 normalizer = Normalizer2.getInstance(null, method, Normalizer2.Mode.COMPOSE); - this.normalizer = wrapWithUnicodeSetFilter(normalizer, settings); + this.normalizer = wrapWithUnicodeSetFilter(indexSettings, normalizer, settings); } @Override @@ -55,8 +59,17 @@ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory return this; } - static Normalizer2 wrapWithUnicodeSetFilter(final Normalizer2 normalizer, Settings settings) { + static Normalizer2 wrapWithUnicodeSetFilter(final IndexSettings indexSettings, + final Normalizer2 normalizer, + final Settings settings) { String unicodeSetFilter = settings.get("unicodeSetFilter"); + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) { + if (unicodeSetFilter != null) { + deprecationLogger.deprecated("[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]"); + } else { + unicodeSetFilter = settings.get("unicode_set_filter"); + } + } if (unicodeSetFilter != null) { UnicodeSet unicodeSet = new UnicodeSet(unicodeSetFilter); diff --git a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml index c9ff2b2fb64..bb2cf97a897 100644 --- a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml +++ b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yml @@ -48,6 +48,61 @@ 
--- "Normalization with a UnicodeSet Filter": - do: + indices.create: + index: test + body: + settings: + index: + analysis: + char_filter: + charfilter_icu_normalizer: + type: icu_normalizer + unicode_set_filter: "[^ß]" + filter: + tokenfilter_icu_normalizer: + type: icu_normalizer + unicode_set_filter: "[^ßB]" + tokenfilter_icu_folding: + type: icu_folding + unicode_set_filter: "[^â]" + - do: + indices.analyze: + index: test + body: + char_filter: ["charfilter_icu_normalizer"] + tokenizer: keyword + text: charfilter Föo Bâr Ruß + - length: { tokens: 1 } + - match: { tokens.0.token: charfilter föo bâr ruß } + - do: + indices.analyze: + index: test + body: + tokenizer: keyword + filter: ["tokenfilter_icu_normalizer"] + text: tokenfilter Föo Bâr Ruß + - length: { tokens: 1 } + - match: { tokens.0.token: tokenfilter föo Bâr ruß } + - do: + indices.analyze: + index: test + body: + tokenizer: keyword + filter: ["tokenfilter_icu_folding"] + text: icufolding Föo Bâr Ruß + - length: { tokens: 1 } + - match: { tokens.0.token: icufolding foo bâr russ } + +--- +"Normalization with a CamcelCase UnicodeSet Filter": + - skip: + version: " - 6.99.99" + reason: unicodeSetFilter deprecated in 7.0.0, replaced by unicode_set_filter + features: "warnings" + + - do: + warnings: + - "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]" indices.create: index: test body: diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java index a69bc6eb487..987942ef4f0 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java @@ -108,7 +108,6 @@ public class AzureUnicastHostsProvider extends 
AbstractComponent implements Unic public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureComputeService, TransportService transportService, NetworkService networkService) { - super(settings); this.settings = settings; this.azureComputeService = azureComputeService; this.transportService = transportService; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java index a65500d9e22..35c2b5288bf 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java @@ -33,7 +33,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.LazyInitializable; import java.util.Random; @@ -46,10 +45,6 @@ class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service { private final AtomicReference> lazyClientReference = new AtomicReference<>(); - AwsEc2ServiceImpl(Settings settings) { - super(settings); - } - private AmazonEC2 buildClient(Ec2ClientSettings clientSettings) { final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings); final ClientConfiguration configuration = buildConfiguration(logger, clientSettings); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 8f503704298..2817c1c3b60 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ 
b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -69,7 +69,6 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos private final TransportAddressesCache dynamicHosts; AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) { - super(settings); this.transportService = transportService; this.awsEc2Service = awsEc2Service; diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java index 49beb4fd995..6b26808f74c 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java @@ -78,7 +78,7 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Reloa protected final AwsEc2Service ec2Service; public Ec2DiscoveryPlugin(Settings settings) { - this(settings, new AwsEc2ServiceImpl(settings)); + this(settings, new AwsEc2ServiceImpl()); } protected Ec2DiscoveryPlugin(Settings settings, AwsEc2ServiceImpl ec2Service) { @@ -91,7 +91,7 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Reloa @Override public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { logger.debug("Register _ec2_, _ec2:xxx_ network names"); - return new Ec2NameResolver(settings); + return new Ec2NameResolver(); } @Override diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java index 92bd01dd9ae..4cfaba23ed4 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java +++ 
b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2NameResolver.java @@ -23,7 +23,6 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.NetworkService.CustomNameResolver; -import org.elasticsearch.common.settings.Settings; import java.io.BufferedReader; import java.io.IOException; @@ -79,13 +78,6 @@ class Ec2NameResolver extends AbstractComponent implements CustomNameResolver { } } - /** - * Construct a {@link CustomNameResolver}. - */ - Ec2NameResolver(Settings settings) { - super(settings); - } - /** * @param type the ec2 hostname type to discover. * @return the appropriate host resolved from ec2 meta-data, or null if it cannot be obtained. diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java index 0596dd697b2..e44087f9413 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java @@ -24,8 +24,6 @@ import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.Tag; -import org.elasticsearch.common.settings.Settings; - import java.util.List; public class AwsEc2ServiceMock extends AwsEc2ServiceImpl { @@ -33,8 +31,7 @@ public class AwsEc2ServiceMock extends AwsEc2ServiceImpl { private final int nodes; private final List> tagsList; - public AwsEc2ServiceMock(Settings settings, int nodes, List> tagsList) { - super(settings); + public AwsEc2ServiceMock(int nodes, List> tagsList) { this.nodes = nodes; this.tagsList = tagsList; } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java 
b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java index a92bd243bc9..bc45a95c2f3 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java @@ -32,7 +32,7 @@ public class Ec2DiscoveryPluginMock extends Ec2DiscoveryPlugin { } public Ec2DiscoveryPluginMock(Settings settings, int nodes, List> tagsList) { - super(settings, new AwsEc2ServiceMock(settings, nodes, tagsList)); + super(settings, new AwsEc2ServiceMock(nodes, tagsList)); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 295df0c818a..aa619409c16 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -298,7 +298,7 @@ public class Ec2DiscoveryTests extends ESTestCase { } public void testGetNodeListEmptyCache() throws Exception { - AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); + AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(1, null); DummyEc2HostProvider provider = new DummyEc2HostProvider(Settings.EMPTY, transportService, awsEc2Service) { @Override protected List fetchDynamicNodes() { diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java index 52bf7e67b0d..9e904c47f17 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java @@ -32,6 +32,9 @@ import static org.hamcrest.Matchers.containsString; /** * Test 
for EC2 network.host settings. + *

+ * Warning: This test doesn't assert that the exceptions are thrown. + * They aren't. */ public class Ec2NetworkTests extends ESTestCase { /** @@ -42,10 +45,11 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2_") .build(); - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); + // note: this can succeed and the test can pass } catch (IOException e) { assertThat(e.getMessage(), containsString("local-ipv4")); } @@ -59,10 +63,11 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:publicIp_") .build(); - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); + // note: this can succeed and the test can pass } catch (IOException e) { assertThat(e.getMessage(), containsString("public-ipv4")); } @@ -76,10 +81,11 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:privateIp_") .build(); - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. 
try { networkService.resolveBindHostAddresses(null); + // note: this can succeed and the test can pass } catch (IOException e) { assertThat(e.getMessage(), containsString("local-ipv4")); } @@ -93,10 +99,11 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:privateIpv4_") .build(); - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); + // note: this can succeed and the test can pass } catch (IOException e) { assertThat(e.getMessage(), containsString("local-ipv4")); } @@ -110,10 +117,11 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:privateDns_") .build(); - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); + // note: this can succeed and the test can pass } catch (IOException e) { assertThat(e.getMessage(), containsString("local-hostname")); } @@ -127,10 +135,11 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:publicIpv4_") .build(); - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. 
try { networkService.resolveBindHostAddresses(null); + // note: this can succeed and the test can pass } catch (IOException e) { assertThat(e.getMessage(), containsString("public-ipv4")); } @@ -144,10 +153,11 @@ public class Ec2NetworkTests extends ESTestCase { .put("network.host", "_ec2:publicDns_") .build(); - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. try { networkService.resolveBindHostAddresses(null); + // note: this can succeed and the test can pass } catch (IOException e) { assertThat(e.getMessage(), containsString("public-hostname")); } @@ -158,11 +168,7 @@ public class Ec2NetworkTests extends ESTestCase { * network.host: _local_ */ public void testNetworkHostCoreLocal() throws IOException { - Settings nodeSettings = Settings.builder() - .put("network.host", "_local_") - .build(); - - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); + NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); InetAddress[] addresses = networkService.resolveBindHostAddresses(null); assertThat(addresses, arrayContaining(networkService.resolveBindHostAddresses(new String[] { "_local_" }))); } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java index e2e55b018d2..116bf1842d0 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java @@ -108,7 +108,6 @@ public class GceInstancesServiceImpl extends AbstractComponent 
implements GceIns private final boolean validateCerts; public GceInstancesServiceImpl(Settings settings) { - super(settings); this.settings = settings; this.validateCerts = GCE_VALIDATE_CERTIFICATES.get(settings); this.project = resolveProject(); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java index 46c4ac7bac5..e53a1e241bb 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java @@ -22,9 +22,7 @@ package org.elasticsearch.cloud.gce.network; import org.elasticsearch.cloud.gce.GceMetadataService; import org.elasticsearch.cloud.gce.util.Access; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.network.NetworkService.CustomNameResolver; -import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.net.InetAddress; @@ -39,7 +37,7 @@ import java.net.InetAddress; *

  • _gce:hostname_
  • * */ -public class GceNameResolver extends AbstractComponent implements CustomNameResolver { +public class GceNameResolver implements CustomNameResolver { private final GceMetadataService gceMetadataService; @@ -73,8 +71,7 @@ public class GceNameResolver extends AbstractComponent implements CustomNameReso /** * Construct a {@link CustomNameResolver}. */ - public GceNameResolver(Settings settings, GceMetadataService gceMetadataService) { - super(settings); + public GceNameResolver(GceMetadataService gceMetadataService) { this.gceMetadataService = gceMetadataService; } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index 9f90ef3a308..2d1bb07b239 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -74,7 +74,6 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstancesService, TransportService transportService, NetworkService networkService) { - super(settings); this.settings = settings; this.gceInstancesService = gceInstancesService; this.transportService = transportService; diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index 2d94c72323c..9aef304e08f 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -95,7 +95,7 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close @Override 
public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { logger.debug("Register _gce_, _gce:xxx network names"); - return new GceNameResolver(settings, new GceMetadataService(settings)); + return new GceNameResolver(new GceMetadataService(settings)); } @Override diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java index 1fe1297904b..94f2959917d 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceNetworkTests.java @@ -107,7 +107,7 @@ public class GceNetworkTests extends ESTestCase { .build(); GceMetadataServiceMock mock = new GceMetadataServiceMock(nodeSettings); - NetworkService networkService = new NetworkService(Collections.singletonList(new GceNameResolver(nodeSettings, mock))); + NetworkService networkService = new NetworkService(Collections.singletonList(new GceNameResolver(mock))); try { InetAddress[] addresses = networkService.resolveBindHostAddresses( NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY)); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index f4bc362e536..6268a337284 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.component.AbstractComponent; -import 
org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; @@ -47,9 +46,8 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { private final String container; private final LocationMode locationMode; - public AzureBlobStore(RepositoryMetaData metadata, Settings settings, AzureStorageService service) + public AzureBlobStore(RepositoryMetaData metadata, AzureStorageService service) throws URISyntaxException, StorageException { - super(settings); this.container = Repository.CONTAINER_SETTING.get(metadata.settings()); this.clientName = Repository.CLIENT_NAME.get(metadata.settings()); this.service = service; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 0797c78af33..d11c8ee81d5 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -125,7 +125,7 @@ public class AzureRepository extends BlobStoreRepository { */ @Override protected AzureBlobStore createBlobStore() throws URISyntaxException, StorageException { - final AzureBlobStore blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); + final AzureBlobStore blobStore = new AzureBlobStore(metadata, storageService); logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( "using container [{}], chunk_size [{}], compress [{}], base_path [{}]", diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java index 75bcc3e9c1e..ec461cf38f2 100644 --- 
a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java @@ -70,7 +70,6 @@ public class AzureStorageService extends AbstractComponent { volatile Map storageSettings = emptyMap(); public AzureStorageService(Settings settings) { - super(settings); // eagerly load client settings so that secure settings are read final Map clientsSettings = AzureStorageSettings.load(settings); refreshAndClearCache(clientsSettings); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java index 10deeb4676f..a06dd7c3f28 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreContainerTests.java @@ -34,7 +34,7 @@ public class AzureBlobStoreContainerTests extends ESBlobStoreContainerTestCase { try { RepositoryMetaData repositoryMetaData = new RepositoryMetaData("azure", "ittest", Settings.EMPTY); AzureStorageServiceMock client = new AzureStorageServiceMock(); - return new AzureBlobStore(repositoryMetaData, Settings.EMPTY, client); + return new AzureBlobStore(repositoryMetaData, client); } catch (URISyntaxException | StorageException e) { throw new IOException(e); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreTests.java index 025ee45b9c3..9a0c9039d08 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreTests.java +++ 
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobStoreTests.java @@ -34,7 +34,7 @@ public class AzureBlobStoreTests extends ESBlobStoreTestCase { try { RepositoryMetaData repositoryMetaData = new RepositoryMetaData("azure", "ittest", Settings.EMPTY); AzureStorageServiceMock client = new AzureStorageServiceMock(); - return new AzureBlobStore(repositoryMetaData, Settings.EMPTY, client); + return new AzureBlobStore(repositoryMetaData, client); } catch (URISyntaxException | StorageException e) { throw new IOException(e); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 1e94467f5a5..7894f9fc7df 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -37,7 +37,6 @@ import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.internal.io.Streams; import java.io.ByteArrayOutputStream; @@ -69,8 +68,7 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore private final String clientName; private final GoogleCloudStorageService storageService; - GoogleCloudStorageBlobStore(Settings settings, String bucketName, String clientName, GoogleCloudStorageService storageService) { - super(settings); + GoogleCloudStorageBlobStore(String bucketName, String clientName, GoogleCloudStorageService storageService) { this.bucketName = bucketName; this.clientName = clientName; this.storageService = storageService; 
diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index 12e7fd26ff5..3186d2547a3 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -38,14 +38,14 @@ public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin final GoogleCloudStorageService storageService; public GoogleCloudStoragePlugin(final Settings settings) { - this.storageService = createStorageService(settings); + this.storageService = createStorageService(); // eagerly load client settings so that secure settings are readable (not closed) reload(settings); } // overridable for tests - protected GoogleCloudStorageService createStorageService(Settings settings) { - return new GoogleCloudStorageService(settings); + protected GoogleCloudStorageService createStorageService() { + return new GoogleCloudStorageService(); } @Override diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index bfe48038eef..508f6b8cdc4 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -91,7 +91,7 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { @Override protected GoogleCloudStorageBlobStore createBlobStore() { - return new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService); + return new GoogleCloudStorageBlobStore(bucket, clientName, storageService); } @Override diff --git 
a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index b24674da174..b38957651df 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -32,7 +32,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.LazyInitializable; @@ -54,10 +53,6 @@ public class GoogleCloudStorageService extends AbstractComponent { */ private final AtomicReference>> clientsCache = new AtomicReference<>(emptyMap()); - public GoogleCloudStorageService(final Settings settings) { - super(settings); - } - /** * Refreshes the client settings and clears the client cache. 
Subsequent calls to * {@code GoogleCloudStorageService#client} will return new clients constructed diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java index 0cc1243f283..2f23011d4d9 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.gcs; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; import java.util.Locale; @@ -42,6 +41,6 @@ public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContai } catch (final Exception e) { throw new RuntimeException(e); } - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService); + return new GoogleCloudStorageBlobStore(bucketName, clientName, storageService); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 6d5c1bbf853..db166a228b5 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -79,17 +79,12 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos } @Override - protected GoogleCloudStorageService createStorageService(Settings settings) { - return new 
MockGoogleCloudStorageService(settings); + protected GoogleCloudStorageService createStorageService() { + return new MockGoogleCloudStorageService(); } } public static class MockGoogleCloudStorageService extends GoogleCloudStorageService { - - MockGoogleCloudStorageService(Settings settings) { - super(settings); - } - @Override public Storage client(String clientName) { return new MockStorage(BUCKET, blobs); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java index 4634bd3274a..e2adfed94bb 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.gcs; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.ESBlobStoreTestCase; import java.util.Locale; @@ -42,6 +41,6 @@ public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase { } catch (final Exception e) { throw new RuntimeException(e); } - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService); + return new GoogleCloudStorageBlobStore(bucketName, clientName, storageService); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 0130d2c576c..15522829419 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ 
b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -63,7 +63,7 @@ public class GoogleCloudStorageServiceTests extends ESTestCase { .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint) .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) .build(); - final GoogleCloudStorageService service = new GoogleCloudStorageService(settings); + final GoogleCloudStorageService service = new GoogleCloudStorageService(); service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(settings)); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.client("another_client")); assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 7715c7086a6..e1ffc7a22d4 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import java.io.IOException; @@ -55,9 +54,8 @@ class S3BlobStore extends AbstractComponent implements BlobStore { private final StorageClass storageClass; - S3BlobStore(Settings settings, S3Service service, String clientName, String bucket, boolean serverSideEncryption, + S3BlobStore(S3Service service, String clientName, String bucket, boolean 
serverSideEncryption, ByteSizeValue bufferSize, String cannedACL, String storageClass) { - super(settings); this.service = service; this.clientName = clientName; this.bucket = bucket; diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 2fbb7bb56d6..e0e34e40f3c 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -245,7 +245,7 @@ class S3Repository extends BlobStoreRepository { protected S3BlobStore createBlobStore() { if (reference != null) { assert S3ClientSettings.checkDeprecatedCredentials(metadata.settings()) : metadata.name(); - return new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass) { + return new S3BlobStore(service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass) { @Override public AmazonS3Reference clientReference() { if (reference.tryIncRef()) { @@ -256,7 +256,7 @@ class S3Repository extends BlobStoreRepository { } }; } else { - return new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + return new S3BlobStore(service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index da3219f2aef..a2f9da5f846 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -64,7 +64,7 @@ public class S3RepositoryPlugin extends Plugin implements 
RepositoryPlugin, Relo protected final S3Service service; public S3RepositoryPlugin(final Settings settings) { - this(settings, new S3Service(settings)); + this(settings, new S3Service()); } S3RepositoryPlugin(final Settings settings, final S3Service service) { diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index a431f4da1fd..95313f90656 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -33,7 +33,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import java.io.Closeable; import java.io.IOException; @@ -47,10 +46,6 @@ class S3Service extends AbstractComponent implements Closeable { private volatile Map clientsCache = emptyMap(); private volatile Map clientsSettings = emptyMap(); - S3Service(Settings settings) { - super(settings); - } - /** * Refreshes the settings for the AmazonS3 clients and clears the cache of * existing clients. New clients will be build using these new settings. 
Old diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 1c3c47943a0..ec5d5578a03 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -68,11 +68,6 @@ public class RepositoryCredentialsTests extends ESTestCase { } static final class ProxyS3Service extends S3Service { - - ProxyS3Service(Settings settings) { - super(settings); - } - @Override AmazonS3 buildClient(final S3ClientSettings clientSettings) { final AmazonS3 client = super.buildClient(clientSettings); @@ -82,7 +77,7 @@ public class RepositoryCredentialsTests extends ESTestCase { } ProxyS3RepositoryPlugin(Settings settings) { - super(settings, new ProxyS3Service(settings)); + super(settings, new ProxyS3Service()); } @Override diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 3f75ae94aa9..b4c2f81a3f8 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -118,7 +118,7 @@ public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCa @Override public Map getRepositories(final Environment env, final NamedXContentRegistry registry) { return Collections.singletonMap(S3Repository.TYPE, - (metadata) -> new S3Repository(metadata, env.settings(), registry, new S3Service(env.settings()) { + (metadata) -> new S3Repository(metadata, env.settings(), registry, new S3Service() { @Override AmazonS3 buildClient(S3ClientSettings 
clientSettings) { return new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java index 55df03ff34a..a44ad706b23 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java @@ -24,7 +24,6 @@ import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.StorageClass; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.repositories.ESBlobStoreTestCase; @@ -117,13 +116,13 @@ public class S3BlobStoreTests extends ESBlobStoreTestCase { final String theClientName = randomAlphaOfLength(4); final AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass); - final S3Service service = new S3Service(Settings.EMPTY) { + final S3Service service = new S3Service() { @Override public synchronized AmazonS3Reference client(String clientName) { assert theClientName.equals(clientName); return new AmazonS3Reference(client); } }; - return new S3BlobStore(Settings.EMPTY, service, theClientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + return new S3BlobStore(service, theClientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 
b76af23402c..ecfa8e8d97d 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -57,10 +57,6 @@ public class S3RepositoryTests extends ESTestCase { } private static class DummyS3Service extends S3Service { - DummyS3Service() { - super(Settings.EMPTY); - } - @Override public AmazonS3Reference client(String clientName) { return new AmazonS3Reference(new DummyS3Client()); diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index 8b305462e4d..e812af9522b 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -98,7 +98,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest integr // basic integ tests includes testing bwc against the most recent version task integTest { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.snapshotsIndexCompatible) { + for (final def version : bwcVersions.unreleasedIndexCompatible) { dependsOn "v${version}#bwcTest" } } diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index ac57d51def7..62de043b5cc 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -65,7 +65,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest integr // basic integ tests includes testing bwc against the most recent version task integTest { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.snapshotsWireCompatible) { + for (final def version : bwcVersions.unreleasedWireCompatible) { dependsOn "v${version}#bwcTest" } } diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index bfd37863cc2..4e27511fe04 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -145,7 +145,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest 
integr // basic integ tests includes testing bwc against the most recent version task integTest { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.snapshotsWireCompatible) { + for (final def version : bwcVersions.unreleasedWireCompatible) { dependsOn "v${version}#bwcTest" } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index 81724bd72ab..04f01cf0f0e 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -298,7 +298,7 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase { ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { - loggingFilter.set(new LoggingFilter(clusterService.getSettings(), threadPool)); + loggingFilter.set(new LoggingFilter(threadPool)); return Collections.emptyList(); } @@ -313,8 +313,7 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase { private final ThreadPool threadPool; - public LoggingFilter(Settings settings, ThreadPool pool) { - super(settings); + public LoggingFilter(ThreadPool pool) { this.threadPool = pool; } diff --git a/qa/verify-version-constants/build.gradle b/qa/verify-version-constants/build.gradle index 30c879ec614..46b5df8ae0c 100644 --- a/qa/verify-version-constants/build.gradle +++ b/qa/verify-version-constants/build.gradle @@ -57,7 +57,7 @@ test.enabled = false task integTest { if (project.bwc_tests_enabled) { - final def version = bwcVersions.snapshotsIndexCompatible.first() + final def version = bwcVersions.unreleasedIndexCompatible.first() dependsOn "v${version}#bwcTest" } } diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml index b1f81db92d6..f9247d10761 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml @@ -70,37 +70,3 @@ - match: { _id: "1" } - match: { fields.count: [1] } - match: { _source.include.field1: v1 } - ---- -"Deprecated _source_include and _source_exclude": - - - skip: - version: " - 6.5.99" - reason: _source_include and _source_exclude are deprecated from 6.6.0 - features: "warnings" - - - do: - indices.create: - index: test_1 - body: - mappings: - _doc: - properties: - count: - type: integer - store: true - - - do: - index: - index: test_1 - type: _doc - id: 1 - body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - - do: - get: { index: test_1, type: _doc, id: 1, _source_include: include.field1 } - warnings: - - "Deprecated parameter [_source_include] used, expected [_source_includes] instead" - - do: - get: { index: test_1, type: _doc, id: 1, _source_includes: include, _source_exclude: "*.field2" } - warnings: - - "Deprecated parameter [_source_exclude] used, expected [_source_excludes] instead" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml index 412a25ffa35..b52027f98c8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/270_median_absolute_deviation_metric.yml @@ -1,6 +1,6 @@ setup: - skip: - version: " - 6.6.0" + version: " - 6.5.99" reason: "added in 6.6.0" - do: indices.create: diff --git 
a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index cc1bb9bb17f..4e589f613a6 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -384,7 +384,7 @@ public class ActionModule extends AbstractModule { if (transportClient) { restController = null; } else { - restController = new RestController(settings, headers, restWrapper, nodeClient, circuitBreakerService, usageService); + restController = new RestController(headers, restWrapper, nodeClient, circuitBreakerService, usageService); } } diff --git a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java index 7d8dbd1f975..a4c3e17e802 100644 --- a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java +++ b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java @@ -20,7 +20,6 @@ package org.elasticsearch.action; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; @@ -28,14 +27,13 @@ import org.elasticsearch.transport.TransportService; /** * A generic proxy that will execute the given action against a specific node. 
*/ -public class TransportActionNodeProxy extends AbstractComponent { +public class TransportActionNodeProxy { private final TransportService transportService; private final Action action; private final TransportRequestOptions transportOptions; public TransportActionNodeProxy(Settings settings, Action action, TransportService transportService) { - super(settings); this.action = action; this.transportService = transportService; this.transportOptions = action.transportOptions(settings); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 22e8554ed6a..dfe0a25c611 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -486,6 +486,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest reduceContextFunction) { - super(settings); + public SearchPhaseController(Function reduceContextFunction) { this.reduceContextFunction = reduceContextFunction; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 302ed4ccbfe..54d7aee8f0d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -26,11 +26,9 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.HandledTransportAction.ChannelActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; @@ -66,7 +64,7 @@ import java.util.function.BiFunction; * An encapsulation of {@link org.elasticsearch.search.SearchService} operations exposed through * transport. */ -public class SearchTransportService extends AbstractComponent { +public class SearchTransportService { public static final String FREE_CONTEXT_SCROLL_ACTION_NAME = "indices:data/read/search[free_context/scroll]"; public static final String FREE_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context]"; @@ -84,9 +82,8 @@ public class SearchTransportService extends AbstractComponent { private final BiFunction responseWrapper; private final Map clientConnections = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); - public SearchTransportService(Settings settings, TransportService transportService, + public SearchTransportService(TransportService transportService, BiFunction responseWrapper) { - super(settings); this.transportService = transportService; this.responseWrapper = responseWrapper; } diff --git a/server/src/main/java/org/elasticsearch/action/support/ActionFilter.java b/server/src/main/java/org/elasticsearch/action/support/ActionFilter.java index 3e12d0cc842..c23fe476dcc 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ActionFilter.java +++ b/server/src/main/java/org/elasticsearch/action/support/ActionFilter.java @@ -22,8 +22,6 @@ package org.elasticsearch.action.support; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; /** @@ -47,12 +45,7 @@ 
public interface ActionFilter { * filter chain. This base class should serve any action filter implementations that doesn't require * to apply async filtering logic. */ - abstract class Simple extends AbstractComponent implements ActionFilter { - - protected Simple(Settings settings) { - super(settings); - } - + abstract class Simple implements ActionFilter { @Override public final void apply(Task task, String action, Request request, ActionListener listener, ActionFilterChain chain) { diff --git a/server/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java b/server/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java index 30d6461ef61..b87ff9f7ec3 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java +++ b/server/src/main/java/org/elasticsearch/action/support/ActiveShardsObserver.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; @@ -42,8 +41,7 @@ public class ActiveShardsObserver extends AbstractComponent { private final ClusterService clusterService; private final ThreadPool threadPool; - public ActiveShardsObserver(final Settings settings, final ClusterService clusterService, final ThreadPool threadPool) { - super(settings); + public ActiveShardsObserver(final ClusterService clusterService, final ThreadPool threadPool) { this.clusterService = clusterService; this.threadPool = threadPool; } diff --git a/server/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java b/server/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java index 56d5bf206f3..583c34e0964 100644 
--- a/server/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java +++ b/server/src/main/java/org/elasticsearch/action/support/DestructiveOperations.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.support; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -28,7 +27,7 @@ import org.elasticsearch.common.settings.Settings; /** * Helper for dealing with destructive operations and wildcard usage. */ -public final class DestructiveOperations extends AbstractComponent { +public final class DestructiveOperations { /** * Setting which controls whether wildcard usage (*, prefix*, _all) is allowed. @@ -38,7 +37,6 @@ public final class DestructiveOperations extends AbstractComponent { private volatile boolean destructiveRequiresName; public DestructiveOperations(Settings settings, ClusterSettings clusterSettings) { - super(settings); destructiveRequiresName = REQUIRES_NAME_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(REQUIRES_NAME_SETTING, this::setDestructiveRequiresName); } diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index e861a9f8d3f..4b49a8057ac 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -39,7 +39,7 @@ public abstract class TransportAction clusterPlugins, ClusterInfoService clusterInfoService) { this.deciderList = createAllocationDeciders(settings, clusterService.getClusterSettings(), clusterPlugins); - this.allocationDeciders = new AllocationDeciders(settings, deciderList); + this.allocationDeciders = new AllocationDeciders(deciderList); this.shardsAllocator = createShardsAllocator(settings, 
clusterService.getClusterSettings(), clusterPlugins); this.clusterService = clusterService; - this.indexNameExpressionResolver = new IndexNameExpressionResolver(settings); - this.allocationService = new AllocationService(settings, allocationDeciders, shardsAllocator, clusterInfoService); + this.indexNameExpressionResolver = new IndexNameExpressionResolver(); + this.allocationService = new AllocationService(allocationDeciders, shardsAllocator, clusterInfoService); } public static List getNamedWriteables() { @@ -205,16 +205,16 @@ public class ClusterModule extends AbstractModule { List clusterPlugins) { // collect deciders by class so that we can detect duplicates Map deciders = new LinkedHashMap<>(); - addAllocationDecider(deciders, new MaxRetryAllocationDecider(settings)); - addAllocationDecider(deciders, new ResizeAllocationDecider(settings)); - addAllocationDecider(deciders, new ReplicaAfterPrimaryActiveAllocationDecider(settings)); - addAllocationDecider(deciders, new RebalanceOnlyWhenActiveAllocationDecider(settings)); + addAllocationDecider(deciders, new MaxRetryAllocationDecider()); + addAllocationDecider(deciders, new ResizeAllocationDecider()); + addAllocationDecider(deciders, new ReplicaAfterPrimaryActiveAllocationDecider()); + addAllocationDecider(deciders, new RebalanceOnlyWhenActiveAllocationDecider()); addAllocationDecider(deciders, new ClusterRebalanceAllocationDecider(settings, clusterSettings)); addAllocationDecider(deciders, new ConcurrentRebalanceAllocationDecider(settings, clusterSettings)); addAllocationDecider(deciders, new EnableAllocationDecider(settings, clusterSettings)); - addAllocationDecider(deciders, new NodeVersionAllocationDecider(settings)); - addAllocationDecider(deciders, new SnapshotInProgressAllocationDecider(settings)); - addAllocationDecider(deciders, new RestoreInProgressAllocationDecider(settings)); + addAllocationDecider(deciders, new NodeVersionAllocationDecider()); + addAllocationDecider(deciders, new 
SnapshotInProgressAllocationDecider()); + addAllocationDecider(deciders, new RestoreInProgressAllocationDecider()); addAllocationDecider(deciders, new FilterAllocationDecider(settings, clusterSettings)); addAllocationDecider(deciders, new SameShardAllocationDecider(settings, clusterSettings)); addAllocationDecider(deciders, new DiskThresholdDecider(settings, clusterSettings)); diff --git a/server/src/main/java/org/elasticsearch/cluster/EmptyClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/EmptyClusterInfoService.java index 8e705a24063..c6632bd524a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/EmptyClusterInfoService.java +++ b/server/src/main/java/org/elasticsearch/cluster/EmptyClusterInfoService.java @@ -19,19 +19,12 @@ package org.elasticsearch.cluster; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; - /** * ClusterInfoService that provides empty maps for disk usage and shard sizes */ -public class EmptyClusterInfoService extends AbstractComponent implements ClusterInfoService { +public class EmptyClusterInfoService implements ClusterInfoService { public static final EmptyClusterInfoService INSTANCE = new EmptyClusterInfoService(); - private EmptyClusterInfoService() { - super(Settings.EMPTY); - } - @Override public ClusterInfo getClusterInfo() { return ClusterInfo.EMPTY; diff --git a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index a22c2099106..184cbcdf859 100644 --- a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -88,7 +88,6 @@ public class InternalClusterInfoService extends AbstractComponent public InternalClusterInfoService(Settings settings, ClusterService clusterService, ThreadPool threadPool, NodeClient client, Consumer 
listener) { - super(settings); this.leastAvailableSpaceUsages = ImmutableOpenMap.of(); this.mostAvailableSpaceUsages = ImmutableOpenMap.of(); this.shardRoutingToDataPath = ImmutableOpenMap.of(); diff --git a/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java b/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java index 56311455a0e..770c6bca26b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/index/MappingUpdatedAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; import org.elasticsearch.client.Client; import org.elasticsearch.client.IndicesAdminClient; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -39,7 +38,7 @@ import org.elasticsearch.index.mapper.Mapping; * Called by shards in the cluster when their mapping was dynamically updated and it needs to be updated * in the cluster state meta data (and broadcast to all members). 
*/ -public class MappingUpdatedAction extends AbstractComponent { +public class MappingUpdatedAction { public static final Setting INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING = Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30), @@ -50,7 +49,6 @@ public class MappingUpdatedAction extends AbstractComponent { @Inject public MappingUpdatedAction(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.dynamicMappingUpdateTimeout = INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, this::setDynamicMappingUpdateTimeout); } diff --git a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java index 5a3e6e326bd..6dd40fba5cb 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -48,8 +47,7 @@ public class NodeMappingRefreshAction extends AbstractComponent { private final MetaDataMappingService metaDataMappingService; @Inject - public NodeMappingRefreshAction(Settings settings, TransportService transportService, MetaDataMappingService metaDataMappingService) { - super(settings); + public NodeMappingRefreshAction(TransportService transportService, MetaDataMappingService metaDataMappingService) { this.transportService = 
transportService; this.metaDataMappingService = metaDataMappingService; transportService.registerRequestHandler(ACTION_NAME, diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 81d6a806143..3780a8bb9f4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -46,7 +46,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.Discovery; @@ -89,9 +88,8 @@ public class ShardStateAction extends AbstractComponent { private final ConcurrentMap remoteFailedShardsCache = ConcurrentCollections.newConcurrentMap(); @Inject - public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, + public ShardStateAction(ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService, ThreadPool threadPool) { - super(settings); this.transportService = transportService; this.clusterService = clusterService; this.threadPool = threadPool; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index 766b35307cd..c9258806d51 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -22,8 +22,6 @@ package 
org.elasticsearch.cluster.metadata; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; @@ -43,12 +41,7 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQuery * Validator for an alias, to be used before adding an alias to the index metadata * and make sure the alias is valid */ -public class AliasValidator extends AbstractComponent { - - public AliasValidator(Settings settings) { - super(settings); - } - +public class AliasValidator { /** * Allows to validate an {@link org.elasticsearch.action.admin.indices.alias.Alias} and make sure * it's valid before it gets added to the index metadata. Doesn't validate the alias filter. diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolver.java index 2032c2f4ef3..9cbf46b6afc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolver.java @@ -19,9 +19,7 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import java.util.ArrayList; import java.util.Collections; @@ -33,14 +31,10 @@ import java.util.stream.Collectors; * Resolves cluster names from an expression. The expression must be the exact match of a cluster * name or must be a wildcard expression. 
*/ -public final class ClusterNameExpressionResolver extends AbstractComponent { +public final class ClusterNameExpressionResolver { private final WildcardExpressionResolver wildcardResolver = new WildcardExpressionResolver(); - public ClusterNameExpressionResolver(Settings settings) { - super(settings); - } - /** * Resolves the provided cluster expression to matching cluster names. This method only * supports exact or wildcard matches. diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 66bd86b7961..300ea3fc883 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -26,9 +26,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateMathParser; @@ -54,18 +52,14 @@ import java.util.SortedMap; import java.util.function.Predicate; import java.util.stream.Collectors; -public class IndexNameExpressionResolver extends AbstractComponent { +import static java.util.Collections.unmodifiableList; - private final List expressionResolvers; - private final DateMathExpressionResolver dateMathExpressionResolver; +public class IndexNameExpressionResolver { - public IndexNameExpressionResolver(Settings settings) { - super(settings); - expressionResolvers = Arrays.asList( - dateMathExpressionResolver = new DateMathExpressionResolver(), - new WildcardExpressionResolver() - ); - 
} + private final DateMathExpressionResolver dateMathExpressionResolver = new DateMathExpressionResolver(); + private final List expressionResolvers = unmodifiableList(Arrays.asList( + dateMathExpressionResolver, + new WildcardExpressionResolver())); /** * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 41d57323a05..08e81eb9331 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -128,7 +128,6 @@ public class MetaDataCreateIndexService extends AbstractComponent { final ThreadPool threadPool, final NamedXContentRegistry xContentRegistry, final boolean forbidPrivateIndexSettings) { - super(settings); this.settings = settings; this.clusterService = clusterService; this.indicesService = indicesService; @@ -136,7 +135,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { this.aliasValidator = aliasValidator; this.env = env; this.indexScopedSettings = indexScopedSettings; - this.activeShardsObserver = new ActiveShardsObserver(settings, clusterService, threadPool); + this.activeShardsObserver = new ActiveShardsObserver(clusterService, threadPool); this.xContentRegistry = xContentRegistry; this.forbidPrivateIndexSettings = forbidPrivateIndexSettings; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java index df63e06dab2..39563ca7037 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataDeleteIndexService.java @@ -55,7 +55,6 @@ public class MetaDataDeleteIndexService extends AbstractComponent { @Inject public MetaDataDeleteIndexService(Settings settings, ClusterService clusterService, AllocationService allocationService) { - super(settings); this.settings = settings; this.clusterService = clusterService; this.allocationService = allocationService; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 28dc7f2425d..e6d0fc08324 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -29,9 +29,7 @@ import org.elasticsearch.cluster.metadata.AliasAction.NewAliasValidator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -54,7 +52,7 @@ import static org.elasticsearch.indices.cluster.IndicesClusterStateService.Alloc /** * Service responsible for submitting add and remove aliases requests */ -public class MetaDataIndexAliasesService extends AbstractComponent { +public class MetaDataIndexAliasesService { private final ClusterService clusterService; @@ -67,9 +65,8 @@ public class MetaDataIndexAliasesService extends AbstractComponent { private final NamedXContentRegistry xContentRegistry; @Inject - public MetaDataIndexAliasesService(Settings settings, ClusterService clusterService, IndicesService 
indicesService, + public MetaDataIndexAliasesService(ClusterService clusterService, IndicesService indicesService, AliasValidator aliasValidator, MetaDataDeleteIndexService deleteIndexService, NamedXContentRegistry xContentRegistry) { - super(settings); this.clusterService = clusterService; this.indicesService = indicesService; this.aliasValidator = aliasValidator; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index 6badfd6ef58..38d83b39885 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.rest.RestStatus; @@ -76,15 +75,14 @@ public class MetaDataIndexStateService extends AbstractComponent { private final ActiveShardsObserver activeShardsObserver; @Inject - public MetaDataIndexStateService(Settings settings, ClusterService clusterService, AllocationService allocationService, + public MetaDataIndexStateService(ClusterService clusterService, AllocationService allocationService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, IndicesService indicesService, ThreadPool threadPool) { - super(settings); this.indicesService = indicesService; this.clusterService = clusterService; this.allocationService = allocationService; this.metaDataIndexUpgradeService = metaDataIndexUpgradeService; - this.activeShardsObserver = new ActiveShardsObserver(settings, clusterService, threadPool); + 
this.activeShardsObserver = new ActiveShardsObserver(clusterService, threadPool); } public void closeIndex(final CloseIndexClusterStateUpdateRequest request, final ActionListener listener) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index 40d2a697140..e397c150c1c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -71,11 +71,10 @@ public class MetaDataIndexTemplateService extends AbstractComponent { private final NamedXContentRegistry xContentRegistry; @Inject - public MetaDataIndexTemplateService(Settings settings, ClusterService clusterService, + public MetaDataIndexTemplateService(ClusterService clusterService, MetaDataCreateIndexService metaDataCreateIndexService, AliasValidator aliasValidator, IndicesService indicesService, IndexScopedSettings indexScopedSettings, NamedXContentRegistry xContentRegistry) { - super(settings); this.clusterService = clusterService; this.aliasValidator = aliasValidator; this.indicesService = indicesService; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index 09da16ddd59..84fa0626317 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -62,7 +62,6 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { public MetaDataIndexUpgradeService(Settings settings, NamedXContentRegistry xContentRegistry, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings, Collection> indexMetaDataUpgraders) { - super(settings); 
this.settings = settings; this.xContentRegistry = xContentRegistry; this.mapperRegistry = mapperRegistry; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 3cb8ea9e91d..8a65ae874f0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; @@ -68,8 +67,7 @@ public class MetaDataMappingService extends AbstractComponent { @Inject - public MetaDataMappingService(Settings settings, ClusterService clusterService, IndicesService indicesService) { - super(settings); + public MetaDataMappingService(ClusterService clusterService, IndicesService indicesService) { this.clusterService = clusterService; this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index b2bc082f284..2284d507afa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -75,9 +75,8 @@ public class MetaDataUpdateSettingsService extends AbstractComponent { private final ThreadPool threadPool; @Inject - public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService 
allocationService, + public MetaDataUpdateSettingsService(ClusterService clusterService, AllocationService allocationService, IndexScopedSettings indexScopedSettings, IndicesService indicesService, ThreadPool threadPool) { - super(settings); this.clusterService = clusterService; this.threadPool = threadPool; this.allocationService = allocationService; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java index 1f042b4c576..9026d26a11f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; @@ -75,9 +74,8 @@ public class TemplateUpgradeService extends AbstractComponent implements Cluster private ImmutableOpenMap lastTemplateMetaData; - public TemplateUpgradeService(Settings settings, Client client, ClusterService clusterService, ThreadPool threadPool, + public TemplateUpgradeService(Client client, ClusterService clusterService, ThreadPool threadPool, Collection>> indexTemplateMetaDataUpgraders) { - super(settings); this.client = client; this.clusterService = clusterService; this.threadPool = threadPool; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index ac3ffcec7a9..81f7f68593b 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -54,7 +54,6 @@ public class OperationRouting extends AbstractComponent { private boolean useAdaptiveReplicaSelection; public OperationRouting(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.awarenessAttributes = AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings); this.useAdaptiveReplicaSelection = USE_ADAPTIVE_REPLICA_SELECTION_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 8c3225dc77f..0bc94a93cc5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -39,7 +39,6 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayAllocator; import java.util.ArrayList; @@ -69,16 +68,15 @@ public class AllocationService extends AbstractComponent { private final ShardsAllocator shardsAllocator; private final ClusterInfoService clusterInfoService; - public AllocationService(Settings settings, AllocationDeciders allocationDeciders, + public AllocationService(AllocationDeciders allocationDeciders, GatewayAllocator gatewayAllocator, ShardsAllocator shardsAllocator, ClusterInfoService 
clusterInfoService) { - this(settings, allocationDeciders, shardsAllocator, clusterInfoService); + this(allocationDeciders, shardsAllocator, clusterInfoService); setGatewayAllocator(gatewayAllocator); } - public AllocationService(Settings settings, AllocationDeciders allocationDeciders, + public AllocationService(AllocationDeciders allocationDeciders, ShardsAllocator shardsAllocator, ClusterInfoService clusterInfoService) { - super(settings); this.allocationDeciders = allocationDeciders; this.shardsAllocator = shardsAllocator; this.clusterInfoService = clusterInfoService; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java index 0ddf3ef1529..f2447a9c4e5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java @@ -54,7 +54,6 @@ public class DiskThresholdMonitor extends AbstractComponent { public DiskThresholdMonitor(Settings settings, Supplier clusterStateSupplier, ClusterSettings clusterSettings, Client client) { - super(settings); this.clusterStateSupplier = clusterStateSupplier; this.diskThresholdSettings = new DiskThresholdSettings(settings, clusterSettings); this.client = client; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 4ddeb8de073..597e904e24e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -97,7 +97,6 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards @Inject public 
BalancedShardsAllocator(Settings settings, ClusterSettings clusterSettings) { - super(settings); setWeightFunction(INDEX_BALANCE_FACTOR_SETTING.get(settings), SHARD_BALANCE_FACTOR_SETTING.get(settings)); setThreshold(THRESHOLD_SETTING.get(settings)); clusterSettings.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java index 12cac56e11a..3ae86d60bd9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java @@ -19,13 +19,12 @@ package org.elasticsearch.cluster.routing.allocation.decider; +import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; /** * {@link AllocationDecider} is an abstract base class that allows to make @@ -33,15 +32,6 @@ import org.elasticsearch.common.settings.Settings; * basis. */ public abstract class AllocationDecider extends AbstractComponent { - - /** - * Initializes a new {@link AllocationDecider} - * @param settings {@link Settings} used by this {@link AllocationDecider} - */ - protected AllocationDecider(Settings settings) { - super(settings); - } - /** * Returns a {@link Decision} whether the given shard routing can be * re-balanced to the given allocation. 
The default is diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java index ee7f761d65d..7f91be340fd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; import java.util.Collection; import java.util.Collections; @@ -38,8 +37,7 @@ public class AllocationDeciders extends AllocationDecider { private final Collection allocations; - public AllocationDeciders(Settings settings, Collection allocations) { - super(settings); + public AllocationDeciders(Collection allocations) { this.allocations = Collections.unmodifiableCollection(allocations); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index 6105c732d55..8994bbf1e45 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -92,7 +92,6 @@ public class AwarenessAllocationDecider extends AllocationDecider { private volatile Map> forcedAwarenessAttributes; public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.awarenessAttributes = CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings); 
clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, this::setAwarenessAttributes); setForcedAwarenessAttributes(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.get(settings)); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index ea945c23c72..1ea369c75d9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -91,7 +91,6 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { private volatile ClusterRebalanceType type; public ClusterRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); try { type = CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.get(settings); } catch (IllegalStateException e) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 15456ec3e11..a11b3dcf102 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -48,7 +48,6 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { private volatile int clusterConcurrentRebalance; public ConcurrentRebalanceAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.clusterConcurrentRebalance = 
CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.get(settings); logger.debug("using [cluster_concurrent_rebalance] with [{}]", clusterConcurrentRebalance); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index a7426d3e551..9676eaf4df1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -73,7 +73,6 @@ public class DiskThresholdDecider extends AllocationDecider { private final DiskThresholdSettings diskThresholdSettings; public DiskThresholdDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.diskThresholdSettings = new DiskThresholdSettings(settings, clusterSettings); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index d0fbe6761b2..8a72fe8cb49 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -80,7 +80,6 @@ public class EnableAllocationDecider extends AllocationDecider { private volatile Allocation enableAllocation; public EnableAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.enableAllocation = CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.get(settings); this.enableRebalance = CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.get(settings); 
clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING, this::setEnableAllocation); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index ed2d5384fa7..053d696f676 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -97,7 +97,6 @@ public class FilterAllocationDecider extends AllocationDecider { private volatile DiscoveryNodeFilters clusterExcludeFilters; public FilterAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); setClusterRequireFilters(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.getAsMap(settings)); setClusterExcludeFilters(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getAsMap(settings)); setClusterIncludeFilters(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getAsMap(settings)); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java index 4c580509e92..708482feae7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; /** * An allocation decider that prevents shards from being allocated on any node if the shards allocation has been 
retried N times without @@ -42,15 +41,6 @@ public class MaxRetryAllocationDecider extends AllocationDecider { public static final String NAME = "max_retry"; - /** - * Initializes a new {@link MaxRetryAllocationDecider} - * - * @param settings {@link Settings} used by this {@link AllocationDecider} - */ - public MaxRetryAllocationDecider(Settings settings) { - super(settings); - } - @Override public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { final UnassignedInfo unassignedInfo = shardRouting.unassignedInfo(); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java index f2df6d3196d..e2817eb87a7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; /** * An allocation decider that prevents relocation or allocation from nodes @@ -38,10 +37,6 @@ public class NodeVersionAllocationDecider extends AllocationDecider { public static final String NAME = "node_version"; - public NodeVersionAllocationDecider(Settings settings) { - super(settings); - } - @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (shardRouting.primary()) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java index c4cd2ecf50d..3d890067bbf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; /** * Only allow rebalancing when all shards are active within the shard replication group. @@ -30,10 +29,6 @@ public class RebalanceOnlyWhenActiveAllocationDecider extends AllocationDecider public static final String NAME = "rebalance_only_when_active"; - public RebalanceOnlyWhenActiveAllocationDecider(Settings settings) { - super(settings); - } - @Override public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) { if (!allocation.routingNodes().allReplicasActive(shardRouting.shardId(), allocation.metaData())) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ReplicaAfterPrimaryActiveAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ReplicaAfterPrimaryActiveAllocationDecider.java index 4cceb1cc161..2bc81b84570 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ReplicaAfterPrimaryActiveAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ReplicaAfterPrimaryActiveAllocationDecider.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing.allocation.decider; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import 
org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; /** * An allocation strategy that only allows for a replica to be allocated when the primary is active. @@ -31,10 +30,6 @@ public class ReplicaAfterPrimaryActiveAllocationDecider extends AllocationDecide private static final String NAME = "replica_after_primary_active"; - public ReplicaAfterPrimaryActiveAllocationDecider(Settings settings) { - super(settings); - } - @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return canAllocate(shardRouting, allocation); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ResizeAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ResizeAllocationDecider.java index 8babcd5484f..d58a625c6ed 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ResizeAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ResizeAllocationDecider.java @@ -26,11 +26,9 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; - /** * An allocation decider that ensures we allocate the shards of a target index for resize operations next to the source primaries */ @@ -38,15 +36,6 @@ public class ResizeAllocationDecider extends AllocationDecider { public static final String NAME = "resize"; - /** - * Initializes a new {@link ResizeAllocationDecider} - * - * @param settings {@link Settings} used by this {@link AllocationDecider} - */ - public ResizeAllocationDecider(Settings settings) { - super(settings); 
- } - @Override public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { return canAllocate(shardRouting, null, allocation); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java index 3fefd4e0abb..63971ca46e4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDecider.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.Snapshot; /** @@ -35,16 +34,6 @@ public class RestoreInProgressAllocationDecider extends AllocationDecider { public static final String NAME = "restore_in_progress"; - /** - * Creates a new {@link RestoreInProgressAllocationDecider} instance from - * given settings - * - * @param settings {@link Settings} to use - */ - public RestoreInProgressAllocationDecider(Settings settings) { - super(settings); - } - @Override public Decision canAllocate(final ShardRouting shardRouting, final RoutingNode node, final RoutingAllocation allocation) { return canAllocate(shardRouting, allocation); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java index cc2d488974b..2961b3faaf4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java @@ -53,7 +53,6 @@ public class SameShardAllocationDecider extends AllocationDecider { private volatile boolean sameHost; public SameShardAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.sameHost = CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING, this::setSameHost); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index 33ecccd3bfc..1c0a0c0ef0a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -76,7 +76,6 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { private final Settings settings; public ShardsLimitAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.settings = settings; this.clusterShardLimit = CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer(CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING, this::setClusterShardLimit); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index eb4cc0c4420..7eb1b882d1f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -23,7 
+23,6 @@ import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; /** * This {@link org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider} prevents shards that @@ -33,16 +32,6 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { public static final String NAME = "snapshot_in_progress"; - /** - * Creates a new {@link org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider} instance from - * given settings - * - * @param settings {@link org.elasticsearch.common.settings.Settings} to use - */ - public SnapshotInProgressAllocationDecider(Settings settings) { - super(settings); - } - /** * Returns a {@link Decision} whether the given shard routing can be * re-balanced to the given allocation. 
The default is diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index 7821ad11a52..0d67cd6071f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -81,7 +81,6 @@ public class ThrottlingAllocationDecider extends AllocationDecider { private volatile int concurrentOutgoingRecoveries; public ThrottlingAllocationDecider(Settings settings, ClusterSettings clusterSettings) { - super(settings); this.primariesInitialRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.get(settings); concurrentIncomingRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.get(settings); concurrentOutgoingRecoveries = CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.get(settings); diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java index c49143edb44..cffdf0f4507 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobStore.java @@ -42,7 +42,6 @@ public class FsBlobStore extends AbstractComponent implements BlobStore { private final boolean readOnly; public FsBlobStore(Settings settings, Path path) throws IOException { - super(settings); this.path = path; this.readOnly = settings.getAsBoolean("readonly", false); if (!this.readOnly) { diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java index 622834d9198..1e0310c0247 100644 
--- a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java @@ -21,13 +21,16 @@ package org.elasticsearch.common.component; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.settings.Settings; +/** + * @deprecated declare your own logger + */ +@Deprecated public abstract class AbstractComponent { protected final Logger logger; - public AbstractComponent(Settings settings) { + public AbstractComponent() { this.logger = LogManager.getLogger(getClass()); } } diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index 8a472954ab4..1b06ade9aeb 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -32,7 +32,7 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple private final List listeners = new CopyOnWriteArrayList<>(); protected AbstractLifecycleComponent(Settings settings) { - super(settings); + // TODO drop settings from ctor } @Override diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index 28df6206e9b..9a5ab873083 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -76,15 +76,6 @@ public class Loggers { return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s); } - /** - * Get or build a logger. 
- * @deprecated Prefer {@link LogManager#getLogger} - */ - @Deprecated - public static Logger getLogger(Class clazz) { - return ESLoggerFactory.getLogger(null, clazz); - } - private static String formatPrefix(String... prefixes) { String prefix = null; if (prefixes != null && prefixes.length > 0) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index d9ecbcfa922..9667341cd6e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -66,7 +66,6 @@ public abstract class AbstractScopedSettings extends AbstractComponent { final Set> settingsSet, final Set> settingUpgraders, final Setting.Property scope) { - super(settings); this.settings = settings; this.lastSettingsApplied = Settings.EMPTY; @@ -107,7 +106,6 @@ public abstract class AbstractScopedSettings extends AbstractComponent { } protected AbstractScopedSettings(Settings nodeSettings, Settings scopeSettings, AbstractScopedSettings other) { - super(nodeSettings); this.settings = nodeSettings; this.lastSettingsApplied = scopeSettings; this.scope = other.scope; diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java b/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java index 1c67318e282..fc68d58d5de 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingsFilter.java @@ -16,10 +16,10 @@ * specific language governing permissions and limitations * under the License. 
*/ + package org.elasticsearch.common.settings; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.rest.RestRequest; @@ -36,7 +36,7 @@ import java.util.Set; * A class that allows to filter settings objects by simple regular expression patterns or full settings keys. * It's used for response filtering on the rest layer to for instance filter out sensitive information like access keys. */ -public final class SettingsFilter extends AbstractComponent { +public final class SettingsFilter { /** * Can be used to specify settings filter that will be used to filter out matching settings in toXContent method */ @@ -45,8 +45,7 @@ public final class SettingsFilter extends AbstractComponent { private final Set patterns; private final String patternString; - public SettingsFilter(Settings settings, Collection patterns) { - super(settings); + public SettingsFilter(Collection patterns) { for (String pattern : patterns) { if (isValidPattern(pattern) == false) { throw new IllegalArgumentException("invalid pattern: " + pattern); diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 086346f470a..6a78e81d7f3 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -146,7 +146,7 @@ public class SettingsModule implements Module { } // by now we are fully configured, lets check node level settings for unregistered index settings clusterSettings.validate(settings, true); - this.settingsFilter = new SettingsFilter(settings, settingsFilterPattern); + this.settingsFilter = new SettingsFilter(settingsFilterPattern); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java b/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java index be69cf5b95a..6f2e28e97cb 100644 --- a/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java +++ b/server/src/main/java/org/elasticsearch/common/util/PageCacheRecycler.java @@ -66,7 +66,6 @@ public class PageCacheRecycler extends AbstractComponent implements Releasable { } public PageCacheRecycler(Settings settings) { - super(settings); final Type type = TYPE_SETTING.get(settings); final long limit = LIMIT_HEAP_SETTING.get(settings).getBytes(); final int availableProcessors = EsExecutors.numberOfProcessors(settings); diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 7256bb16747..91001d86b0f 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -77,7 +77,7 @@ public class DiscoveryModule { final Collection> joinValidators = new ArrayList<>(); final Map> hostProviders = new HashMap<>(); hostProviders.put("settings", () -> new SettingsBasedHostsProvider(settings, transportService)); - hostProviders.put("file", () -> new FileBasedUnicastHostsProvider(settings, configFile)); + hostProviders.put("file", () -> new FileBasedUnicastHostsProvider(configFile)); for (DiscoveryPlugin plugin : plugins) { plugin.getZenHostsProviders(transportService, networkService).entrySet().forEach(entry -> { if (hostProviders.put(entry.getKey(), entry.getValue()) != null) { diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java b/server/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java index 5b7613587cd..ebc64fa3af1 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java +++ 
b/server/src/main/java/org/elasticsearch/discovery/DiscoverySettings.java @@ -72,7 +72,6 @@ public class DiscoverySettings extends AbstractComponent { private volatile boolean publishDiff; public DiscoverySettings(Settings settings, ClusterSettings clusterSettings) { - super(settings); clusterSettings.addSettingsUpdateConsumer(NO_MASTER_BLOCK_SETTING, this::setNoMasterBlock); clusterSettings.addSettingsUpdateConsumer(PUBLISH_DIFF_ENABLE_SETTING, this::setPublishDiff); clusterSettings.addSettingsUpdateConsumer(COMMIT_TIMEOUT_SETTING, this::setCommitTimeout); diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java b/server/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java index ebce175e981..8f2853904fa 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java @@ -98,7 +98,6 @@ public class ElectMasterService extends AbstractComponent { } public ElectMasterService(Settings settings) { - super(settings); this.minimumMasterNodes = DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.get(settings); logger.debug("using minimum_master_nodes [{}]", minimumMasterNodes); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java index 5d9b1687e42..3d389fc8141 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/FaultDetection.java @@ -66,7 +66,6 @@ public abstract class FaultDetection extends AbstractComponent implements Closea protected final int pingRetryCount; public FaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterName clusterName) { - super(settings); this.threadPool = threadPool; this.transportService = transportService; this.clusterName = clusterName; diff --git 
a/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java index f9b20580ecd..c80fceea756 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java @@ -21,7 +21,6 @@ package org.elasticsearch.discovery.zen; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import java.io.IOException; @@ -50,8 +49,7 @@ public class FileBasedUnicastHostsProvider extends AbstractComponent implements private final Path unicastHostsFilePath; - public FileBasedUnicastHostsProvider(Settings settings, Path configFile) { - super(settings); + public FileBasedUnicastHostsProvider(Path configFile) { this.unicastHostsFilePath = configFile.resolve(UNICAST_HOSTS_FILE); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java index e8bafea66d3..f699e547bf4 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -67,9 +66,8 @@ public class MembershipAction extends AbstractComponent { private final MembershipListener 
listener; - public MembershipAction(Settings settings, TransportService transportService, MembershipListener listener, + public MembershipAction(TransportService transportService, MembershipListener listener, Collection> joinValidators) { - super(settings); this.transportService = transportService; this.listener = listener; diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/server/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index 5cceba237e5..ecf52a69753 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -35,7 +35,6 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoverySettings; @@ -65,9 +64,7 @@ public class NodeJoinController extends AbstractComponent { private ElectionContext electionContext = null; - public NodeJoinController(MasterService masterService, AllocationService allocationService, ElectMasterService electMaster, - Settings settings) { - super(settings); + public NodeJoinController(MasterService masterService, AllocationService allocationService, ElectMasterService electMaster) { this.masterService = masterService; joinTaskExecutor = new JoinTaskExecutor(allocationService, electMaster, logger); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java index 5e9f960e893..ca014af53d8 100644 --- 
a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.discovery.AckClusterStatePublishResponseHandler; @@ -97,12 +96,10 @@ public class PublishClusterStateAction extends AbstractComponent { private final AtomicLong compatibleClusterStateDiffReceivedCount = new AtomicLong(); public PublishClusterStateAction( - Settings settings, TransportService transportService, NamedWriteableRegistry namedWriteableRegistry, IncomingClusterStateListener incomingClusterStateListener, DiscoverySettings discoverySettings) { - super(settings); this.transportService = transportService; this.namedWriteableRegistry = namedWriteableRegistry; this.incomingClusterStateListener = incomingClusterStateListener; diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java index 6d6453c776e..a11e255f888 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java @@ -52,8 +52,6 @@ public class SettingsBasedHostsProvider extends AbstractComponent implements Uni private final int limitPortCounts; public SettingsBasedHostsProvider(Settings settings, TransportService transportService) { - super(settings); - if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { configuredHosts = 
DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); // we only limit to 1 address, makes no sense to ping 100 ports diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index 4c0180101af..8fb9cfce0bf 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -124,7 +124,6 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, UnicastHostsProvider unicastHostsProvider, PingContextProvider contextProvider) { - super(settings); this.threadPool = threadPool; this.transportService = transportService; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 5d3bd9da684..b787835926e 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -211,15 +211,14 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover this.publishClusterState = new PublishClusterStateAction( - settings, transportService, namedWriteableRegistry, this, discoverySettings); - this.membership = new MembershipAction(settings, transportService, new MembershipListener(), onJoinValidators); + this.membership = new MembershipAction(transportService, new MembershipListener(), onJoinValidators); this.joinThreadControl = new JoinThreadControl(); - this.nodeJoinController = new NodeJoinController(masterService, allocationService, electMaster, settings); + this.nodeJoinController = new NodeJoinController(masterService, allocationService, electMaster); 
this.nodeRemovalExecutor = new NodeRemovalClusterStateTaskExecutor(allocationService, electMaster, this::submitRejoin, logger); masterService.setClusterStateSupplier(this::clusterState); diff --git a/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java b/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java index 275311cf6a8..ba29a08987d 100644 --- a/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.routing.allocation.NodeAllocationResult; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import java.util.ArrayList; import java.util.List; @@ -42,11 +41,6 @@ import java.util.List; * the logic to determine to which nodes (if any) those shards are allocated. */ public abstract class BaseGatewayShardAllocator extends AbstractComponent { - - public BaseGatewayShardAllocator(Settings settings) { - super(settings); - } - /** * Allocate unassigned shards to nodes (if any) where valid copies of the shard already exist. 
* It is up to the individual implementations of {@link #makeAllocationDecision(ShardRouting, RoutingAllocation, Logger)} diff --git a/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java b/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java index acfcadb2f51..d9eb5013e9c 100644 --- a/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java +++ b/server/src/main/java/org/elasticsearch/gateway/DanglingIndicesState.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; @@ -60,9 +59,8 @@ public class DanglingIndicesState extends AbstractComponent implements ClusterSt private final Map danglingIndices = ConcurrentCollections.newConcurrentMap(); @Inject - public DanglingIndicesState(Settings settings, NodeEnvironment nodeEnv, MetaStateService metaStateService, + public DanglingIndicesState(NodeEnvironment nodeEnv, MetaStateService metaStateService, LocalAllocateDangledIndices allocateDangledIndices, ClusterService clusterService) { - super(settings); this.nodeEnv = nodeEnv; this.metaStateService = metaStateService; this.allocateDangledIndices = allocateDangledIndices; diff --git a/server/src/main/java/org/elasticsearch/gateway/Gateway.java b/server/src/main/java/org/elasticsearch/gateway/Gateway.java index dc44688e6cb..7a31a089903 100644 --- a/server/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/server/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -49,7 +49,6 @@ public class Gateway extends AbstractComponent { public Gateway(Settings settings, ClusterService clusterService, TransportNodesListGatewayMetaState 
listGatewayMetaState, IndicesService indicesService) { - super(settings); this.indicesService = indicesService; this.clusterService = clusterService; this.listGatewayMetaState = listGatewayMetaState; diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index 4e7266a6832..dce92b1dd50 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -33,7 +33,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; @@ -54,12 +53,11 @@ public class GatewayAllocator extends AbstractComponent { asyncFetchStore = ConcurrentCollections.newConcurrentMap(); @Inject - public GatewayAllocator(Settings settings, ClusterService clusterService, RoutingService routingService, + public GatewayAllocator(ClusterService clusterService, RoutingService routingService, TransportNodesListGatewayStartedShards startedAction, TransportNodesListShardStoreMetaData storeAction) { - super(settings); this.routingService = routingService; - this.primaryShardAllocator = new InternalPrimaryShardAllocator(settings, startedAction); - this.replicaShardAllocator = new InternalReplicaShardAllocator(settings, storeAction); + this.primaryShardAllocator = new InternalPrimaryShardAllocator(startedAction); + this.replicaShardAllocator = new InternalReplicaShardAllocator(storeAction); clusterService.addStateApplier(event -> { boolean cleanCache = false; DiscoveryNode localNode = event.state().nodes().getLocalNode(); 
@@ -80,8 +78,7 @@ public class GatewayAllocator extends AbstractComponent { } // for tests - protected GatewayAllocator(Settings settings) { - super(settings); + protected GatewayAllocator() { this.routingService = null; this.primaryShardAllocator = null; this.replicaShardAllocator = null; @@ -157,8 +154,7 @@ public class GatewayAllocator extends AbstractComponent { private final TransportNodesListGatewayStartedShards startedAction; - InternalPrimaryShardAllocator(Settings settings, TransportNodesListGatewayStartedShards startedAction) { - super(settings); + InternalPrimaryShardAllocator(TransportNodesListGatewayStartedShards startedAction) { this.startedAction = startedAction; } @@ -182,8 +178,7 @@ public class GatewayAllocator extends AbstractComponent { private final TransportNodesListShardStoreMetaData storeAction; - InternalReplicaShardAllocator(Settings settings, TransportNodesListShardStoreMetaData storeAction) { - super(settings); + InternalReplicaShardAllocator(TransportNodesListShardStoreMetaData storeAction) { this.storeAction = storeAction; } diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index fd65d17b1d8..9bbb5af5bf0 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -69,7 +69,6 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateA public GatewayMetaState(Settings settings, NodeEnvironment nodeEnv, MetaStateService metaStateService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) throws IOException { - super(settings); this.nodeEnv = nodeEnv; this.metaStateService = metaStateService; diff --git a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index 
914d45f5b5a..efdf29e2eb6 100644 --- a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -64,9 +63,8 @@ public class LocalAllocateDangledIndices extends AbstractComponent { private final MetaDataIndexUpgradeService metaDataIndexUpgradeService; @Inject - public LocalAllocateDangledIndices(Settings settings, TransportService transportService, ClusterService clusterService, + public LocalAllocateDangledIndices(TransportService transportService, ClusterService clusterService, AllocationService allocationService, MetaDataIndexUpgradeService metaDataIndexUpgradeService) { - super(settings); this.transportService = transportService; this.clusterService = clusterService; this.allocationService = allocationService; diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java index 9377247488e..24f5fd63662 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaStateService.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; @@ -42,8 +41,7 @@ public class MetaStateService extends AbstractComponent { private final NodeEnvironment nodeEnv; private final NamedXContentRegistry namedXContentRegistry; - public MetaStateService(Settings settings, NodeEnvironment nodeEnv, NamedXContentRegistry namedXContentRegistry) { - super(settings); + public MetaStateService(NodeEnvironment nodeEnv, NamedXContentRegistry namedXContentRegistry) { this.nodeEnv = nodeEnv; this.namedXContentRegistry = namedXContentRegistry; } diff --git a/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index d79f23be72b..79030336acc 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -34,7 +34,6 @@ import org.elasticsearch.cluster.routing.allocation.NodeAllocationResult.ShardSt import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.ShardLockObtainFailedException; import org.elasticsearch.gateway.AsyncShardFetch.FetchResult; import org.elasticsearch.gateway.TransportNodesListGatewayStartedShards.NodeGatewayStartedShards; @@ -63,11 +62,6 @@ import java.util.stream.Stream; * copy that can immediately be promoted to primary, as this takes place in {@link RoutingNodes#failShard}. */ public abstract class PrimaryShardAllocator extends BaseGatewayShardAllocator { - - public PrimaryShardAllocator(Settings settings) { - super(settings); - } - /** * Is the allocator responsible for allocating the given {@link ShardRouting}? 
*/ diff --git a/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index 777e8e31505..10bd6115b4c 100644 --- a/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/server/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -39,7 +39,6 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.store.StoreFileMetaData; @@ -55,11 +54,6 @@ import java.util.Objects; import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; public abstract class ReplicaShardAllocator extends BaseGatewayShardAllocator { - - public ReplicaShardAllocator(Settings settings) { - super(settings); - } - /** * Process existing recoveries of replicas and see if we need to cancel them if we find a better * match. 
Today, a better match is one that has full sync id match compared to not having one in diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index e4fd949d88e..c34a5228b7f 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -188,8 +188,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust this.indexStore = indexStore; indexFieldData.setListener(new FieldDataCacheListener(this)); this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this)); - this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool, indexFieldData, - bitsetFilterCache.createListener(threadPool)); + this.warmer = new IndexWarmer(threadPool, indexFieldData, bitsetFilterCache.createListener(threadPool)); this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache); this.engineFactory = Objects.requireNonNull(engineFactory); // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE diff --git a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java index 98716e9545d..277cdcaba26 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexWarmer.java +++ b/server/src/main/java/org/elasticsearch/index/IndexWarmer.java @@ -22,7 +22,6 @@ package org.elasticsearch.index; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.DirectoryReader; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -46,9 +45,8 @@ public final class IndexWarmer extends 
AbstractComponent { private final List listeners; - IndexWarmer(Settings settings, ThreadPool threadPool, IndexFieldDataService indexFieldDataService, + IndexWarmer(ThreadPool threadPool, IndexFieldDataService indexFieldDataService, Listener... listeners) { - super(settings); ArrayList list = new ArrayList<>(); final Executor executor = threadPool.executor(ThreadPool.Names.WARMER); list.add(new FieldDataWarmer(executor, indexFieldDataService)); diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index 3530fe5ae5d..683a5a79c36 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; + import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -62,13 +62,12 @@ public class PrimaryReplicaSyncer extends AbstractComponent { private volatile ByteSizeValue chunkSize = DEFAULT_CHUNK_SIZE; @Inject - public PrimaryReplicaSyncer(Settings settings, TransportService transportService, TransportResyncReplicationAction syncAction) { - this(settings, transportService.getTaskManager(), syncAction); + public PrimaryReplicaSyncer(TransportService transportService, TransportResyncReplicationAction syncAction) { + this(transportService.getTaskManager(), syncAction); } // for tests - public PrimaryReplicaSyncer(Settings settings, TaskManager taskManager, SyncAction syncAction) { - super(settings); + public PrimaryReplicaSyncer(TaskManager taskManager, SyncAction 
syncAction) { this.taskManager = taskManager; this.syncAction = syncAction; } diff --git a/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java b/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java index 46acd5155ff..bf3046c9548 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java +++ b/server/src/main/java/org/elasticsearch/indices/IndexingMemoryController.java @@ -94,7 +94,6 @@ public class IndexingMemoryController extends AbstractComponent implements Index private final ShardsIndicesStatusChecker statusChecker; IndexingMemoryController(Settings settings, ThreadPool threadPool, Iterable indexServices) { - super(settings); this.indexShards = indexServices; ByteSizeValue indexingBuffer = INDEX_BUFFER_SIZE_SETTING.get(settings); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index 2695c172849..129b839bac7 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -71,7 +71,6 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache, private final Map stats2 = new IdentityHashMap<>(); public IndicesQueryCache(Settings settings) { - super(settings); final ByteSizeValue size = INDICES_CACHE_QUERY_SIZE_SETTING.get(settings); final int count = INDICES_CACHE_QUERY_COUNT_SETTING.get(settings); logger.debug("using [node] query cache with size [{}] max filter count [{}]", diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 626d6e8df17..7c227b3366f 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -88,7 +88,6 @@ public final class 
IndicesRequestCache extends AbstractComponent implements Remo private final Cache cache; IndicesRequestCache(Settings settings) { - super(settings); this.size = INDICES_CACHE_QUERY_SIZE.get(settings); this.expire = INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? INDICES_CACHE_QUERY_EXPIRE.get(settings) : null; long sizeInBytes = size.getBytes(); diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 4a55b86291e..8c2eb9b67b8 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -90,7 +90,6 @@ public class HunspellService extends AbstractComponent { public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) throws IOException { - super(settings); this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); this.hunspellDir = resolveHunspellDirectory(env); this.defaultIgnoreCase = HUNSPELL_IGNORE_CASE.get(settings); diff --git a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 4a784af6bb3..a5945187de1 100644 --- a/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/server/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -58,7 +58,6 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL private final Cache cache; public IndicesFieldDataCache(Settings settings, IndexFieldDataCache.Listener indicesFieldDataCacheListener) { - super(settings); this.indicesFieldDataCacheListener = indicesFieldDataCacheListener; final long sizeInBytes = INDICES_FIELDDATA_CACHE_SIZE_KEY.get(settings).getBytes(); CacheBuilder cacheBuilder = 
CacheBuilder.builder() diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index aeb88021f26..28b5eeeba6b 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -40,7 +40,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; @@ -84,8 +83,7 @@ public class SyncedFlushService extends AbstractComponent implements IndexEventL private final IndexNameExpressionResolver indexNameExpressionResolver; @Inject - public SyncedFlushService(Settings settings, IndicesService indicesService, ClusterService clusterService, TransportService transportService, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings); + public SyncedFlushService(IndicesService indicesService, ClusterService clusterService, TransportService transportService, IndexNameExpressionResolver indexNameExpressionResolver) { this.indicesService = indicesService; this.clusterService = clusterService; this.transportService = transportService; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java index ec05f0e30b0..bd237ae4533 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java @@ -61,9 +61,8 @@ public class 
PeerRecoverySourceService extends AbstractComponent implements Inde final OngoingRecoveries ongoingRecoveries = new OngoingRecoveries(); @Inject - public PeerRecoverySourceService(Settings settings, TransportService transportService, IndicesService indicesService, + public PeerRecoverySourceService(TransportService transportService, IndicesService indicesService, RecoverySettings recoverySettings) { - super(settings); this.transportService = transportService; this.indicesService = indicesService; this.recoverySettings = recoverySettings; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 39709eb3ac2..e461628ac49 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -101,9 +101,8 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde private final RecoveriesCollection onGoingRecoveries; - public PeerRecoveryTargetService(Settings settings, ThreadPool threadPool, TransportService transportService, RecoverySettings - recoverySettings, ClusterService clusterService) { - super(settings); + public PeerRecoveryTargetService(ThreadPool threadPool, TransportService transportService, + RecoverySettings recoverySettings, ClusterService clusterService) { this.threadPool = threadPool; this.transportService = transportService; this.recoverySettings = recoverySettings; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index e238277b698..b90bed90d05 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -85,8 +85,6 @@ public class 
RecoverySettings extends AbstractComponent { private volatile ByteSizeValue chunkSize = DEFAULT_CHUNK_SIZE; public RecoverySettings(Settings settings, ClusterSettings clusterSettings) { - super(settings); - this.retryDelayStateSync = INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.get(settings); // doesn't have to be fast as nodes are reconnected every 10s by default (see InternalClusterService.ReconnectToNodes) // and we want to give the master time to remove a faulty node diff --git a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 8f387c5f7f0..5db490fbb7f 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/server/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -92,7 +92,6 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe @Inject public IndicesStore(Settings settings, IndicesService indicesService, ClusterService clusterService, TransportService transportService, ThreadPool threadPool) { - super(settings); this.settings = settings; this.indicesService = indicesService; this.clusterService = clusterService; diff --git a/server/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java b/server/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java index cacba54d80a..16ab96be067 100644 --- a/server/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java +++ b/server/src/main/java/org/elasticsearch/monitor/fs/FsProbe.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeEnvironment.NodePath; @@ -45,8 +44,7 @@ public class FsProbe extends AbstractComponent { private final 
NodeEnvironment nodeEnv; - public FsProbe(Settings settings, NodeEnvironment nodeEnv) { - super(settings); + public FsProbe(NodeEnvironment nodeEnv) { this.nodeEnv = nodeEnv; } diff --git a/server/src/main/java/org/elasticsearch/monitor/fs/FsService.java b/server/src/main/java/org/elasticsearch/monitor/fs/FsService.java index 66534514b36..66058c9f79c 100644 --- a/server/src/main/java/org/elasticsearch/monitor/fs/FsService.java +++ b/server/src/main/java/org/elasticsearch/monitor/fs/FsService.java @@ -48,8 +48,7 @@ public class FsService extends AbstractComponent { Property.NodeScope); public FsService(final Settings settings, final NodeEnvironment nodeEnvironment, ClusterInfoService clusterInfoService) { - super(settings); - this.probe = new FsProbe(settings, nodeEnvironment); + this.probe = new FsProbe(nodeEnvironment); this.clusterInfoService = clusterInfoService; refreshInterval = REFRESH_INTERVAL_SETTING.get(settings); logger.debug("using refresh_interval [{}]", refreshInterval); diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java index f9d9fd80be0..eb6bd6f2f56 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmService.java @@ -38,7 +38,6 @@ public class JvmService extends AbstractComponent { Property.NodeScope); public JvmService(Settings settings) { - super(settings); this.jvmInfo = JvmInfo.jvmInfo(); this.jvmStats = JvmStats.jvmStats(); diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsService.java b/server/src/main/java/org/elasticsearch/monitor/os/OsService.java index ef6270850a7..8812c1f25b7 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsService.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsService.java @@ -38,7 +38,6 @@ public class OsService extends AbstractComponent { Property.NodeScope); public OsService(Settings 
settings) { - super(settings); this.probe = OsProbe.getInstance(); TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings); this.info = probe.osInfo(refreshInterval.millis(), EsExecutors.numberOfProcessors(settings)); diff --git a/server/src/main/java/org/elasticsearch/monitor/process/ProcessService.java b/server/src/main/java/org/elasticsearch/monitor/process/ProcessService.java index 7bebeb97586..aba7993850a 100644 --- a/server/src/main/java/org/elasticsearch/monitor/process/ProcessService.java +++ b/server/src/main/java/org/elasticsearch/monitor/process/ProcessService.java @@ -37,7 +37,6 @@ public final class ProcessService extends AbstractComponent { Property.NodeScope); public ProcessService(Settings settings) { - super(settings); this.probe = ProcessProbe.getInstance(); final TimeValue refreshInterval = REFRESH_INTERVAL_SETTING.get(settings); processStatsCache = new ProcessStatsCache(refreshInterval, probe.processStats()); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 875b8b2149d..6480ef3ffae 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -352,7 +352,7 @@ public class Node implements Closeable { clusterService.getClusterSettings(), client); final ClusterInfoService clusterInfoService = newClusterInfoService(settings, clusterService, threadPool, client, listener::onNewInfo); - final UsageService usageService = new UsageService(settings); + final UsageService usageService = new UsageService(); ModulesBuilder modules = new ModulesBuilder(); // plugin modules must be added here, before others or we can get crazy injection errors... 
@@ -394,7 +394,7 @@ public class Node implements Closeable { ClusterModule.getNamedXWriteables().stream()) .flatMap(Function.identity()).collect(toList())); modules.add(new RepositoriesModule(this.environment, pluginsService.filterPlugins(RepositoryPlugin.class), xContentRegistry)); - final MetaStateService metaStateService = new MetaStateService(settings, nodeEnvironment, xContentRegistry); + final MetaStateService metaStateService = new MetaStateService(nodeEnvironment, xContentRegistry); // collect engine factory providers from server and from plugins final Collection enginePlugins = pluginsService.filterPlugins(EnginePlugin.class); @@ -418,7 +418,7 @@ public class Node implements Closeable { threadPool, settingsModule.getIndexScopedSettings(), circuitBreakerService, bigArrays, scriptModule.getScriptService(), client, metaStateService, engineFactoryProviders, indexStoreFactories); - final AliasValidator aliasValidator = new AliasValidator(settings); + final AliasValidator aliasValidator = new AliasValidator(); final MetaDataCreateIndexService metaDataCreateIndexService = new MetaDataCreateIndexService( settings, @@ -462,7 +462,7 @@ public class Node implements Closeable { indicesModule.getMapperRegistry(), settingsModule.getIndexScopedSettings(), indexMetaDataUpgraders); final GatewayMetaState gatewayMetaState = new GatewayMetaState(settings, nodeEnvironment, metaStateService, metaDataIndexUpgradeService, metaDataUpgrader); - new TemplateUpgradeService(settings, client, clusterService, threadPool, indexTemplateMetaDataUpgraders); + new TemplateUpgradeService(client, clusterService, threadPool, indexTemplateMetaDataUpgraders); final Transport transport = networkModule.getTransportSupplier().get(); Set taskHeaders = Stream.concat( pluginsService.filterPlugins(ActionPlugin.class).stream().flatMap(p -> p.getTaskHeaders().stream()), @@ -470,8 +470,8 @@ public class Node implements Closeable { ).collect(Collectors.toSet()); final TransportService transportService = 
newTransportService(settings, transport, threadPool, networkModule.getTransportInterceptor(), localNodeFactory, settingsModule.getClusterSettings(), taskHeaders); - final ResponseCollectorService responseCollectorService = new ResponseCollectorService(this.settings, clusterService); - final SearchTransportService searchTransportService = new SearchTransportService(settings, transportService, + final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); + final SearchTransportService searchTransportService = new SearchTransportService(transportService, SearchExecutionStatsCollector.makeWrapper(responseCollectorService)); final HttpServerTransport httpServerTransport = newHttpTransport(networkModule); @@ -494,10 +494,10 @@ public class Node implements Closeable { .flatMap(List::stream) .collect(toList()); - final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(settings, tasksExecutors); + final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(tasksExecutors); final PersistentTasksClusterService persistentTasksClusterService = new PersistentTasksClusterService(settings, registry, clusterService); - final PersistentTasksService persistentTasksService = new PersistentTasksService(settings, clusterService, threadPool, client); + final PersistentTasksService persistentTasksService = new PersistentTasksService(clusterService, threadPool, client); modules.add(b -> { b.bind(Node.class).toInstance(this); @@ -524,12 +524,11 @@ public class Node implements Closeable { b.bind(MetaDataCreateIndexService.class).toInstance(metaDataCreateIndexService); b.bind(SearchService.class).toInstance(searchService); b.bind(SearchTransportService.class).toInstance(searchTransportService); - b.bind(SearchPhaseController.class).toInstance(new SearchPhaseController(settings, - searchService::createReduceContext)); + b.bind(SearchPhaseController.class).toInstance(new 
SearchPhaseController(searchService::createReduceContext)); b.bind(Transport.class).toInstance(transport); b.bind(TransportService.class).toInstance(transportService); b.bind(NetworkService.class).toInstance(networkService); - b.bind(UpdateHelper.class).toInstance(new UpdateHelper(settings, scriptModule.getScriptService())); + b.bind(UpdateHelper.class).toInstance(new UpdateHelper(scriptModule.getScriptService())); b.bind(MetaDataIndexUpgradeService.class).toInstance(metaDataIndexUpgradeService); b.bind(ClusterInfoService.class).toInstance(clusterInfoService); b.bind(GatewayMetaState.class).toInstance(gatewayMetaState); @@ -537,9 +536,9 @@ public class Node implements Closeable { { RecoverySettings recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); processRecoverySettings(settingsModule.getClusterSettings(), recoverySettings); - b.bind(PeerRecoverySourceService.class).toInstance(new PeerRecoverySourceService(settings, transportService, + b.bind(PeerRecoverySourceService.class).toInstance(new PeerRecoverySourceService(transportService, indicesService, recoverySettings)); - b.bind(PeerRecoveryTargetService.class).toInstance(new PeerRecoveryTargetService(settings, threadPool, + b.bind(PeerRecoveryTargetService.class).toInstance(new PeerRecoveryTargetService(threadPool, transportService, recoverySettings, clusterService)); } b.bind(HttpServerTransport.class).toInstance(httpServerTransport); diff --git a/server/src/main/java/org/elasticsearch/node/NodeService.java b/server/src/main/java/org/elasticsearch/node/NodeService.java index f0a9d07f3c7..fe9c3d59d66 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeService.java +++ b/server/src/main/java/org/elasticsearch/node/NodeService.java @@ -68,7 +68,6 @@ public class NodeService extends AbstractComponent implements Closeable { @Nullable HttpServerTransport httpServerTransport, IngestService ingestService, ClusterService clusterService, SettingsFilter settingsFilter, 
ResponseCollectorService responseCollectorService, SearchTransportService searchTransportService) { - super(settings); this.settings = settings; this.threadPool = threadPool; this.monitorService = monitorService; diff --git a/server/src/main/java/org/elasticsearch/node/ResponseCollectorService.java b/server/src/main/java/org/elasticsearch/node/ResponseCollectorService.java index 4ab1bc4cee8..8885728927b 100644 --- a/server/src/main/java/org/elasticsearch/node/ResponseCollectorService.java +++ b/server/src/main/java/org/elasticsearch/node/ResponseCollectorService.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import java.io.IOException; @@ -49,8 +48,7 @@ public final class ResponseCollectorService extends AbstractComponent implements private final ConcurrentMap nodeIdToStats = ConcurrentCollections.newConcurrentMap(); - public ResponseCollectorService(Settings settings, ClusterService clusterService) { - super(settings); + public ResponseCollectorService(ClusterService clusterService) { clusterService.addListener(this); } diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 4cb8c722f26..1ebc8332e42 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -48,7 +48,6 @@ public class PersistentTasksClusterService extends AbstractComponent implements private final EnableAssignmentDecider decider; public PersistentTasksClusterService(Settings settings, PersistentTasksExecutorRegistry 
registry, ClusterService clusterService) { - super(settings); this.clusterService = clusterService; clusterService.addListener(this); this.registry = registry; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java index 758ffbe69a0..a02efb68057 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutor.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.tasks.TaskId; @@ -40,8 +39,7 @@ public abstract class PersistentTasksExecutor> taskExecutors; - public PersistentTasksExecutorRegistry(Settings settings, Collection> taskExecutors) { - super(settings); + public PersistentTasksExecutorRegistry(Collection> taskExecutors) { Map> map = new HashMap<>(); for (PersistentTasksExecutor executor : taskExecutors) { map.put(executor.getTaskName(), executor); diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java index 91cdb400aa0..a90415b530b 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.tasks.Task; @@ -57,11 +56,9 @@ public class PersistentTasksNodeService extends AbstractComponent implements Clu private final TaskManager taskManager; private final NodePersistentTasksExecutor nodePersistentTasksExecutor; - public PersistentTasksNodeService(Settings settings, - PersistentTasksService persistentTasksService, + public PersistentTasksNodeService(PersistentTasksService persistentTasksService, PersistentTasksExecutorRegistry persistentTasksExecutorRegistry, TaskManager taskManager, NodePersistentTasksExecutor nodePersistentTasksExecutor) { - super(settings); this.persistentTasksService = persistentTasksService; this.persistentTasksExecutorRegistry = persistentTasksExecutorRegistry; this.taskManager = taskManager; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java index 665a803a2d9..96775b74ea9 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java @@ -30,7 +30,6 @@ import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.node.NodeClosedException; @@ -55,8 +54,7 @@ public class PersistentTasksService extends AbstractComponent { private final ClusterService clusterService; private final ThreadPool threadPool; - public PersistentTasksService(Settings 
settings, ClusterService clusterService, ThreadPool threadPool, Client client) { - super(settings); + public PersistentTasksService(ClusterService clusterService, ThreadPool threadPool, Client client) { this.client = client; this.clusterService = clusterService; this.threadPool = threadPool; diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index eb71b7ad136..cdf296752ea 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -203,7 +203,7 @@ public class StartPersistentTaskAction extends Action { indexNameExpressionResolver, Request::new); this.persistentTasksClusterService = persistentTasksClusterService; NodePersistentTasksExecutor executor = new NodePersistentTasksExecutor(threadPool); - clusterService.addListener(new PersistentTasksNodeService(settings, persistentTasksService, persistentTasksExecutorRegistry, + clusterService.addListener(new PersistentTasksNodeService(persistentTasksService, persistentTasksExecutorRegistry, transportService.getTaskManager(), executor)); } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index f92405e9966..5dfdeb09095 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -100,7 +100,6 @@ public class PluginsService extends AbstractComponent { * @param classpathPlugins Plugins that exist in the classpath which should be loaded */ public PluginsService(Settings settings, Path configPath, Path modulesDirectory, Path pluginsDirectory, Collection> classpathPlugins) { - super(settings); this.settings = settings; this.configPath = configPath; diff --git 
a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index aef4381cd8b..e97f7acf168 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -69,7 +69,6 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta public RepositoriesService(Settings settings, ClusterService clusterService, TransportService transportService, Map typesRegistry, ThreadPool threadPool) { - super(settings); this.typesRegistry = typesRegistry; this.clusterService = clusterService; this.threadPool = threadPool; @@ -78,7 +77,7 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta if (DiscoveryNode.isDataNode(settings) || DiscoveryNode.isMasterNode(settings)) { clusterService.addStateApplier(this); } - this.verifyAction = new VerifyNodeRepositoryAction(settings, transportService, clusterService, this); + this.verifyAction = new VerifyNodeRepositoryAction(transportService, clusterService, this); } /** diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index fbaf369912e..59b79aedf95 100644 --- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoriesService.VerifyResponse; import org.elasticsearch.tasks.Task; import 
org.elasticsearch.threadpool.ThreadPool; @@ -56,8 +55,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent { private final RepositoriesService repositoriesService; - public VerifyNodeRepositoryAction(Settings settings, TransportService transportService, ClusterService clusterService, RepositoriesService repositoriesService) { - super(settings); + public VerifyNodeRepositoryAction(TransportService transportService, ClusterService clusterService, RepositoriesService repositoriesService) { this.transportService = transportService; this.clusterService = clusterService; this.repositoriesService = repositoriesService; diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 6b9432483f3..5bb806c02d6 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -59,7 +59,7 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH private final LongAdder usageCount = new LongAdder(); protected BaseRestHandler(Settings settings) { - super(settings); + // TODO drop settings from ctor } public final long getUsageCount() { diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 82fcf7178d1..85a3b0bdb45 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.path.PathTrie; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; 
import org.elasticsearch.common.xcontent.XContentType; @@ -75,9 +74,8 @@ public class RestController extends AbstractComponent implements HttpServerTrans private final Set headersToCopy; private UsageService usageService; - public RestController(Settings settings, Set headersToCopy, UnaryOperator handlerWrapper, + public RestController(Set headersToCopy, UnaryOperator handlerWrapper, NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService) { - super(settings); this.headersToCopy = headersToCopy; this.usageService = usageService; if (handlerWrapper == null) { diff --git a/server/src/main/java/org/elasticsearch/script/ScriptService.java b/server/src/main/java/org/elasticsearch/script/ScriptService.java index 98ad65aec55..0bc44983e2b 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptService.java @@ -128,7 +128,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust private double compilesAllowedPerNano; public ScriptService(Settings settings, Map engines, Map> contexts) { - super(settings); this.settings = Objects.requireNonNull(settings); this.engines = Objects.requireNonNull(engines); this.contexts = Objects.requireNonNull(contexts); diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 890f17ab297..539f2de529f 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -720,7 +720,7 @@ public class SearchModule { registerFetchSubPhase(new FetchSourceSubPhase()); registerFetchSubPhase(new VersionFetchSubPhase()); registerFetchSubPhase(new MatchedQueriesFetchSubPhase()); - registerFetchSubPhase(new HighlightPhase(settings, highlighters)); + registerFetchSubPhase(new HighlightPhase(highlighters)); registerFetchSubPhase(new 
ScoreFetchSubPhase()); FetchPhaseConstructionContext context = new FetchPhaseConstructionContext(highlighters); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 444bfe277ce..51750c3953a 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -190,7 +190,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv this.scriptService = scriptService; this.responseCollectorService = responseCollectorService; this.bigArrays = bigArrays; - this.queryPhase = new QueryPhase(clusterService.getSettings()); + this.queryPhase = new QueryPhase(); this.fetchPhase = fetchPhase; this.multiBucketConsumerService = new MultiBucketConsumerService(clusterService, settings); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java index 359fca2fc3d..5298e7eca05 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourceContext.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.fetch.subphase; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -27,7 +26,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ 
-46,8 +44,6 @@ import java.util.function.Function; */ public class FetchSourceContext implements Writeable, ToXContentObject { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(FetchSourceContext.class)); - public static final ParseField INCLUDES_FIELD = new ParseField("includes", "include"); public static final ParseField EXCLUDES_FIELD = new ParseField("excludes", "exclude"); @@ -110,21 +106,11 @@ public class FetchSourceContext implements Writeable, ToXContentObject { } String sIncludes = request.param("_source_includes"); - String sInclude = request.param("_source_include"); - if (sInclude != null) { - DEPRECATION_LOGGER.deprecated("Deprecated parameter [_source_include] used, expected [_source_includes] instead"); - sIncludes = sInclude; - } if (sIncludes != null) { sourceIncludes = Strings.splitStringByCommaToArray(sIncludes); } String sExcludes = request.param("_source_excludes"); - String sExclude = request.param("_source_exclude"); - if (sExclude != null) { - DEPRECATION_LOGGER.deprecated("Deprecated parameter [_source_exclude] used, expected [_source_excludes] instead"); - sExcludes = sExclude; - } if (sExcludes != null) { sourceExcludes = Strings.splitStringByCommaToArray(sExcludes); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 11e46061d67..b4cbd031167 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.search.Query; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.SourceFieldMapper; @@ -35,11 +33,10 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -public class HighlightPhase extends AbstractComponent implements FetchSubPhase { +public class HighlightPhase implements FetchSubPhase { private final Map highlighters; - public HighlightPhase(Settings settings, Map highlighters) { - super(settings); + public HighlightPhase(Map highlighters) { this.highlighters = highlighters; } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 56d409ef313..3523966b7ed 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -39,7 +39,6 @@ import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.QueueResizingEsThreadPoolExecutor; import org.elasticsearch.search.DocValueFormat; @@ -79,10 +78,10 @@ public class QueryPhase implements SearchPhase { private final SuggestPhase suggestPhase; private RescorePhase rescorePhase; - public QueryPhase(Settings settings) { + public QueryPhase() { this.aggregationPhase = new AggregationPhase(); - this.suggestPhase = new SuggestPhase(settings); - this.rescorePhase = new RescorePhase(settings); + this.suggestPhase = new SuggestPhase(); + this.rescorePhase = new RescorePhase(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java 
b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index 7f5a1be285d..f4f3317a81b 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -22,9 +22,7 @@ package org.elasticsearch.search.rescore; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.internal.SearchContext; @@ -33,12 +31,7 @@ import java.io.IOException; /** * Rescore phase of a search request, used to run potentially expensive scoring models against the top matching documents. */ -public class RescorePhase extends AbstractComponent implements SearchPhase { - - public RescorePhase(Settings settings) { - super(settings); - } - +public class RescorePhase implements SearchPhase { @Override public void preProcess(SearchContext context) { } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java b/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java index 874448b924c..89b1f089581 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java @@ -20,8 +20,6 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.suggest.Suggest.Suggestion; @@ -37,12 +35,7 @@ import 
java.util.Map; /** * Suggest phase of a search request, used to collect suggestions */ -public class SuggestPhase extends AbstractComponent implements SearchPhase { - - public SuggestPhase(Settings settings) { - super(settings); - } - +public class SuggestPhase implements SearchPhase { @Override public void preProcess(SearchContext context) { } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 791b59a1d5b..7e1873c3f0a 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -151,10 +151,9 @@ public class RestoreService extends AbstractComponent implements ClusterStateApp private final CleanRestoreStateTaskExecutor cleanRestoreStateTaskExecutor; @Inject - public RestoreService(Settings settings, ClusterService clusterService, RepositoriesService repositoriesService, + public RestoreService(ClusterService clusterService, RepositoriesService repositoriesService, AllocationService allocationService, MetaDataCreateIndexService createIndexService, MetaDataIndexUpgradeService metaDataIndexUpgradeService, ClusterSettings clusterSettings) { - super(settings); this.clusterService = clusterService; this.repositoriesService = repositoriesService; this.allocationService = allocationService; diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java index 73af26cfc70..bb93eac6af8 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -79,7 +79,6 @@ public class TaskManager extends AbstractComponent implements ClusterStateApplie private final ByteSizeValue maxHeaderSize; public TaskManager(Settings settings, ThreadPool threadPool, Set taskHeaders) { - super(settings); this.threadPool = threadPool; 
this.taskHeaders = new ArrayList<>(taskHeaders); this.maxHeaderSize = SETTING_HTTP_MAX_HEADER_SIZE.get(settings); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index 1a837f7d6d9..86c546370b7 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -69,8 +69,7 @@ public class TaskResultsService extends AbstractComponent { private final ClusterService clusterService; @Inject - public TaskResultsService(Settings settings, Client client, ClusterService clusterService) { - super(settings); + public TaskResultsService(Client client, ClusterService clusterService) { this.client = client; this.clusterService = clusterService; } diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index ecf311bc4b9..91d5d9fa371 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -162,8 +162,6 @@ public class ThreadPool extends AbstractComponent implements Scheduler, Closeabl Setting.timeSetting("thread_pool.estimated_time_interval", TimeValue.timeValueMillis(200), Setting.Property.NodeScope); public ThreadPool(final Settings settings, final ExecutorBuilder... 
customBuilders) { - super(settings); - assert Node.NODE_NAME_SETTING.exists(settings); final Map builders = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java index 114bb8c986a..609ce106fab 100644 --- a/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java +++ b/server/src/main/java/org/elasticsearch/transport/ConnectionManager.java @@ -202,7 +202,7 @@ public class ConnectionManager implements Closeable { threadPool.generic().execute(() -> { closeLock.writeLock().lock(); try { - // we are holding a write lock so nobody modifies the connectedNodes / openConnections map - it's safe to first close + // we are holding a write lock so nobody adds to the connectedNodes / openConnections map - it's safe to first close // all instances and then clear the maps Iterator> iterator = connectedNodes.entrySet().iterator(); while (iterator.hasNext()) { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index f643a91fb48..fdb3745d130 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -175,9 +175,8 @@ public abstract class RemoteClusterAware extends AbstractComponent { * @param settings the nodes level settings */ protected RemoteClusterAware(Settings settings) { - super(settings); this.settings = settings; - this.clusterNameResolver = new ClusterNameExpressionResolver(settings); + this.clusterNameResolver = new ClusterNameExpressionResolver(); } /** diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 4dadc362b80..9f53a42646b 100644 ---
a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -110,7 +110,6 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo RemoteClusterConnection(Settings settings, String clusterAlias, List> seedNodes, TransportService transportService, ConnectionManager connectionManager, int maxNumRemoteConnections, Predicate nodePredicate, String proxyAddress) { - super(settings); this.transportService = transportService; this.maxNumRemoteConnections = maxNumRemoteConnections; this.nodePredicate = nodePredicate; diff --git a/server/src/main/java/org/elasticsearch/usage/UsageService.java b/server/src/main/java/org/elasticsearch/usage/UsageService.java index 9f742cca9d9..9e1c2e03734 100644 --- a/server/src/main/java/org/elasticsearch/usage/UsageService.java +++ b/server/src/main/java/org/elasticsearch/usage/UsageService.java @@ -40,9 +40,6 @@ package org.elasticsearch.usage; import org.elasticsearch.action.admin.cluster.node.usage.NodeUsage; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import java.util.ArrayList; @@ -53,14 +50,12 @@ import java.util.Map; /** * A service to monitor usage of Elasticsearch features. 
*/ -public class UsageService extends AbstractComponent { +public class UsageService { private final List handlers; private final long sinceTime; - @Inject - public UsageService(Settings settings) { - super(settings); + public UsageService() { this.handlers = new ArrayList<>(); this.sinceTime = System.currentTimeMillis(); } diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 1fa4197e749..8dc24735a29 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -111,8 +111,8 @@ public class ActionModuleTests extends ESTestCase { public void testSetupRestHandlerContainsKnownBuiltin() { SettingsModule settings = new SettingsModule(Settings.EMPTY); - UsageService usageService = new UsageService(settings.getSettings()); - ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(Settings.EMPTY), + UsageService usageService = new UsageService(); + ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(), settings.getIndexScopedSettings(), settings.getClusterSettings(), settings.getSettingsFilter(), null, emptyList(), null, null, usageService); actionModule.initRestHandlers(null); @@ -134,8 +134,8 @@ public class ActionModuleTests extends ESTestCase { SettingsModule settings = new SettingsModule(Settings.EMPTY); ThreadPool threadPool = new TestThreadPool(getTestName()); try { - UsageService usageService = new UsageService(settings.getSettings()); - ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(Settings.EMPTY), + UsageService usageService = new UsageService(); + ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(), settings.getIndexScopedSettings(), 
settings.getClusterSettings(), settings.getSettingsFilter(), threadPool, singletonList(dupsMainAction), null, null, usageService); Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null)); @@ -166,8 +166,8 @@ public class ActionModuleTests extends ESTestCase { SettingsModule settings = new SettingsModule(Settings.EMPTY); ThreadPool threadPool = new TestThreadPool(getTestName()); try { - UsageService usageService = new UsageService(settings.getSettings()); - ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(Settings.EMPTY), + UsageService usageService = new UsageService(); + ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(), settings.getIndexScopedSettings(), settings.getClusterSettings(), settings.getSettingsFilter(), threadPool, singletonList(registersFakeHandler), null, null, usageService); actionModule.initRestHandlers(null); diff --git a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 40795bff730..036c0b97cca 100644 --- a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -393,7 +393,7 @@ public class IndicesRequestIT extends ESIntegTestCase { internalCluster().coordOnlyNodeClient().admin().indices().flush(flushRequest).actionGet(); clearInterceptedActions(); - String[] indices = new IndexNameExpressionResolver(Settings.EMPTY) + String[] indices = new IndexNameExpressionResolver() .concreteIndexNames(client().admin().cluster().prepareState().get().getState(), flushRequest); assertIndicesSubset(Arrays.asList(indices), indexShardActions); } @@ -418,7 +418,7 @@ public class IndicesRequestIT extends ESIntegTestCase { internalCluster().coordOnlyNodeClient().admin().indices().refresh(refreshRequest).actionGet(); 
clearInterceptedActions(); - String[] indices = new IndexNameExpressionResolver(Settings.EMPTY) + String[] indices = new IndexNameExpressionResolver() .concreteIndexNames(client().admin().cluster().prepareState().get().getState(), refreshRequest); assertIndicesSubset(Arrays.asList(indices), indexShardActions); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java index 067dd0daea0..a75510cfb64 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainActionTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.routing.allocation.ShardAllocationDecision; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -46,13 +45,13 @@ import static org.elasticsearch.action.admin.cluster.allocation.TransportCluster */ public class ClusterAllocationExplainActionTests extends ESTestCase { - private static final AllocationDeciders NOOP_DECIDERS = new AllocationDeciders(Settings.EMPTY, Collections.emptyList()); + private static final AllocationDeciders NOOP_DECIDERS = new AllocationDeciders(Collections.emptyList()); public void testInitializingOrRelocatingShardExplanation() throws Exception { ShardRoutingState shardRoutingState = randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.RELOCATING); ClusterState clusterState = 
ClusterStateCreationUtils.state("idx", randomBoolean(), shardRoutingState); ShardRouting shard = clusterState.getRoutingTable().index("idx").shard(0).primaryShard(); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Collections.emptyList()), clusterState.getRoutingNodes(), clusterState, null, System.nanoTime()); ClusterAllocationExplanation cae = TransportClusterAllocationExplainAction.explainShard(shard, allocation, null, randomBoolean(), new TestGatewayAllocator(), new ShardsAllocator() { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java index 6e34a751007..853a991cf93 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java @@ -80,8 +80,8 @@ public class ClusterRerouteTests extends ESAllocationTestCase { } public void testClusterStateUpdateTask() { - AllocationService allocationService = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + AllocationService allocationService = new AllocationService( + new AllocationDeciders(Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); ClusterState clusterState = createInitialClusterState(allocationService); ClusterRerouteRequest req = new ClusterRerouteRequest(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java index 
9bf4d9d32f6..8052d8fcf57 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java @@ -109,7 +109,7 @@ public class GetIndexActionTests extends ESSingleNodeTestCase { TestTransportGetIndexAction() { super(Settings.EMPTY, GetIndexActionTests.this.transportService, GetIndexActionTests.this.clusterService, GetIndexActionTests.this.threadPool, settingsFilter, new ActionFilters(emptySet()), - new GetIndexActionTests.Resolver(Settings.EMPTY), indicesService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + new GetIndexActionTests.Resolver(), indicesService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); } @Override @@ -121,10 +121,6 @@ public class GetIndexActionTests extends ESSingleNodeTestCase { } static class Resolver extends IndexNameExpressionResolver { - Resolver(Settings settings) { - super(settings); - } - @Override public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index d59700f2b7a..cbc9499cda3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -245,7 +245,7 @@ public class TransportRolloverActionTests extends ESTestCase { public void testGenerateRolloverIndexName() { String invalidIndexName = randomAlphaOfLength(10) + "A"; - IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); expectThrows(IllegalArgumentException.class, () -> 
TransportRolloverAction.generateRolloverIndexName(invalidIndexName, indexNameExpressionResolver)); int num = randomIntBetween(0, 100); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java index 85b85cf9e14..711390f02e7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java @@ -60,7 +60,7 @@ public class GetSettingsActionTests extends ESTestCase { TestTransportGetSettingsAction() { super(Settings.EMPTY, GetSettingsActionTests.this.transportService, GetSettingsActionTests.this.clusterService, GetSettingsActionTests.this.threadPool, settingsFilter, new ActionFilters(Collections.emptySet()), - new Resolver(Settings.EMPTY), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + new Resolver(), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); } @Override protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener listener) { @@ -129,10 +129,6 @@ public class GetSettingsActionTests extends ESTestCase { } static class Resolver extends IndexNameExpressionResolver { - Resolver(Settings settings) { - super(settings); - } - @Override public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java index ce60b14b3ef..8f2db128148 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java @@ -107,8 +107,8 @@ public class 
TransportResizeActionTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(createClusterState("source", randomIntBetween(2, 10), 0, Settings.builder().put("index.blocks.write", true).build())).nodes(DiscoveryNodes.builder().add(newNode("node1"))) .build(); - AllocationService service = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + AllocationService service = new AllocationService(new AllocationDeciders( + Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); @@ -126,8 +126,8 @@ public class TransportResizeActionTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(createClusterState("source", 1, 0, Settings.builder().put("index.blocks.write", true).build())).nodes(DiscoveryNodes.builder().add(newNode("node1"))) .build(); - AllocationService service = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + AllocationService service = new AllocationService(new AllocationDeciders( + Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); @@ -156,8 +156,8 @@ public class TransportResizeActionTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(createClusterState("source", numShards, 0, numShards * 4, Settings.builder().put("index.blocks.write", true).build())).nodes(DiscoveryNodes.builder().add(newNode("node1"))) .build(); - AllocationService service = new 
AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + AllocationService service = new AllocationService(new AllocationDeciders( + Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); @@ -191,8 +191,8 @@ public class TransportResizeActionTests extends ESTestCase { .put("index.blocks.write", true) .build())).nodes(DiscoveryNodes.builder().add(newNode("node1"))) .build(); - AllocationService service = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + AllocationService service = new AllocationService(new AllocationDeciders( + Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 892721f8a5c..6302766be90 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -183,8 +183,8 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { null, xContentRegistry, true); - MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, - new AliasValidator(Settings.EMPTY), null, + 
MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(null, createIndexService, + new AliasValidator(), null, new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS), xContentRegistry); final List throwables = new ArrayList<>(); @@ -217,7 +217,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { xContentRegistry(), true); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService( - Settings.EMPTY, clusterService, createIndexService, new AliasValidator(Settings.EMPTY), indicesService, + clusterService, createIndexService, new AliasValidator(), indicesService, new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS), xContentRegistry()); final List throwables = new ArrayList<>(); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 7fdb12ff135..57bbdb477f8 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -126,7 +126,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { null, null, new ActionFilters(Collections.emptySet()), null, new AutoCreateIndex( SETTINGS, new ClusterSettings(SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - new IndexNameExpressionResolver(SETTINGS) + new IndexNameExpressionResolver() ) ); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index a1abd4d61f7..36bb8094787 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -58,8 +58,8 @@ public class 
TransportBulkActionTests extends ESTestCase { TestTransportBulkAction() { super(Settings.EMPTY, TransportBulkActionTests.this.threadPool, transportService, clusterService, null, null, - null, new ActionFilters(Collections.emptySet()), new Resolver(Settings.EMPTY), - new AutoCreateIndex(Settings.EMPTY, clusterService.getClusterSettings(), new Resolver(Settings.EMPTY))); + null, new ActionFilters(Collections.emptySet()), new Resolver(), + new AutoCreateIndex(Settings.EMPTY, clusterService.getClusterSettings(), new Resolver())); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index f6559f22641..7ac2d6e00ea 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -97,7 +97,7 @@ public class TransportBulkActionTookTests extends ESTestCase { boundAddress -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - IndexNameExpressionResolver resolver = new Resolver(Settings.EMPTY); + IndexNameExpressionResolver resolver = new Resolver(); ActionFilters actionFilters = new ActionFilters(new HashSet<>()); NodeClient client = new NodeClient(Settings.EMPTY, threadPool) { @@ -205,10 +205,6 @@ public class TransportBulkActionTookTests extends ESTestCase { } static class Resolver extends IndexNameExpressionResolver { - Resolver(Settings settings) { - super(settings); - } - @Override public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 2798d661600..b4a90c174f0 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; @@ -60,9 +59,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { final boolean shard1 = randomBoolean(); final boolean shard2 = randomBoolean(); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, ActionListener listener) { @@ -119,9 +116,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { lookup.put("node1", new SearchAsyncActionTests.MockConnection(primaryNode)); lookup.put("node2", new SearchAsyncActionTests.MockConnection(replicaNode)); final boolean shard1 = randomBoolean(); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, ActionListener listener) { @@ -187,7 +182,7 @@ public class CanMatchPreFilterSearchPhaseTests extends ESTestCase { final SearchTransportService 
searchTransportService = - new SearchTransportService(Settings.builder().put("cluster.remote.connect", false).build(), null, null) { + new SearchTransportService(null, null) { @Override public void sendCanMatch( Transport.Connection connection, diff --git a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java index bd1d6a85b09..55c39f735ce 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchPhaseResult; @@ -70,7 +69,7 @@ public class ClearScrollControllerTests extends ESTestCase { } }; List nodesInvoked = new CopyOnWriteArrayList<>(); - SearchTransportService searchTransportService = new SearchTransportService(Settings.EMPTY, null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendClearAllScrollContexts(Transport.Connection connection, ActionListener listener) { nodesInvoked.add(connection.getNode()); @@ -135,7 +134,7 @@ public class ClearScrollControllerTests extends ESTestCase { } }; List nodesInvoked = new CopyOnWriteArrayList<>(); - SearchTransportService searchTransportService = new SearchTransportService(Settings.EMPTY, null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendFreeContext(Transport.Connection connection, long contextId, @@ -213,7 +212,7 @@ public 
class ClearScrollControllerTests extends ESTestCase { } }; List nodesInvoked = new CopyOnWriteArrayList<>(); - SearchTransportService searchTransportService = new SearchTransportService(Settings.EMPTY, null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendFreeContext(Transport.Connection connection, long contextId, diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index fe9be2a06e2..c7a31372304 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.Index; @@ -59,11 +58,9 @@ public class DfsQueryPhaseTests extends ESTestCase { results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, 
SearchActionListener listener) { @@ -121,11 +118,9 @@ public class DfsQueryPhaseTests extends ESTestCase { results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, SearchActionListener listener) { @@ -180,11 +175,9 @@ public class DfsQueryPhaseTests extends ESTestCase { results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, SearchActionListener listener) { diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 7d19ee58f9f..9fe4f92ef2b 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; @@ -69,9 +68,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { .setInnerHits(IntStream.range(0, numInnerHits).mapToObj(hitNum -> new InnerHitBuilder().setName("innerHit" + hitNum)) .collect(Collectors.toList())))); mockSearchPhaseContext.getRequest().source().query(originalQuery); - mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { assertTrue(executedMultiSearch.compareAndSet(false, true)); @@ -144,9 +141,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { String collapseValue = randomBoolean() ? 
null : "boom"; mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder() .collapse(new CollapseBuilder("someField").setInnerHits(new InnerHitBuilder().setName("foobarbaz")))); - mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { assertTrue(executedMultiSearch.compareAndSet(false, true)); @@ -186,9 +181,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { public void testSkipPhase() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { fail("no collapsing here"); @@ -217,9 +210,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { public void testSkipExpandCollapseNoHits() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { fail("expand should not try to send empty multi search request"); @@ -249,9 +240,7 @@ public class ExpandSearchPhaseTests extends ESTestCase { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); boolean version = 
randomBoolean(); - mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { - + mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { final QueryBuilder postFilter = QueryBuilders.existsQuery("foo"); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 55ca24826fc..3db14ec19b8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; import org.elasticsearch.search.DocValueFormat; @@ -47,7 +46,7 @@ import java.util.concurrent.atomic.AtomicReference; public class FetchSearchPhaseTests extends ESTestCase { public void testShortcutQueryAndFetchOptimization() throws IOException { - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); InitialSearchPhase.ArraySearchPhaseResults results = @@ -90,7 +89,7 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testFetchTwoDocument() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - 
SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); @@ -110,8 +109,7 @@ public class FetchSearchPhaseTests extends ESTestCase { queryResult.setShardIndex(1); results.consumeResult(queryResult); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { @@ -147,7 +145,7 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testFailFetchOneDoc() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); @@ -167,8 +165,7 @@ public class FetchSearchPhaseTests extends ESTestCase { queryResult.setShardIndex(1); results.consumeResult(queryResult); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, 
SearchActionListener listener) { @@ -208,7 +205,7 @@ public class FetchSearchPhaseTests extends ESTestCase { int resultSetSize = randomIntBetween(0, 100); // we use at least 2 hits otherwise this is subject to single shard optimization and we trip an assert... int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits); InitialSearchPhase.ArraySearchPhaseResults results = @@ -222,8 +219,7 @@ public class FetchSearchPhaseTests extends ESTestCase { queryResult.setShardIndex(i); results.consumeResult(queryResult); } - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { @@ -266,7 +262,7 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testExceptionFailsPhase() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); @@ -286,8 +282,7 @@ public class FetchSearchPhaseTests extends ESTestCase { queryResult.setShardIndex(1); results.consumeResult(queryResult); AtomicInteger numFetches 
= new AtomicInteger(0); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { @@ -322,7 +317,7 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testCleanupIrrelevantContexts() throws IOException { // contexts that are not fetched should be cleaned up MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + SearchPhaseController controller = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); @@ -342,8 +337,7 @@ public class FetchSearchPhaseTests extends ESTestCase { queryResult.setShardIndex(1); results.consumeResult(queryResult); - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("cluster.remote.connect", false).build(), null, null) { + SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index f327086cd00..ece695575a1 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -184,11 +184,6 @@ public class MultiSearchActionTookTests extends ESTestCase { } static class Resolver extends IndexNameExpressionResolver { - - Resolver(Settings settings) { - super(settings); - } - @Override public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index d3e8c069601..6b5a6ad24de 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchPhaseResult; @@ -93,7 +92,7 @@ public class SearchAsyncActionTests extends ESTestCase { } } - SearchTransportService transportService = new SearchTransportService(Settings.EMPTY, null, null); + SearchTransportService transportService = new SearchTransportService(null, null); Map lookup = new HashMap<>(); Map seenShard = new ConcurrentHashMap<>(); lookup.put(primaryNode.getId(), new MockConnection(primaryNode)); @@ -185,7 +184,7 @@ public class SearchAsyncActionTests extends ESTestCase { GroupShardsIterator shardsIter = getShardsIter("idx", new OriginalIndices(new String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()), 10, randomBoolean(), primaryNode, replicaNode); - SearchTransportService transportService = new SearchTransportService(Settings.EMPTY, null, null); + 
SearchTransportService transportService = new SearchTransportService(null, null); Map lookup = new HashMap<>(); Map seenShard = new ConcurrentHashMap<>(); lookup.put(primaryNode.getId(), new MockConnection(primaryNode)); @@ -286,7 +285,7 @@ public class SearchAsyncActionTests extends ESTestCase { new OriginalIndices(new String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()), randomIntBetween(1, 10), randomBoolean(), primaryNode, replicaNode); AtomicInteger numFreedContext = new AtomicInteger(); - SearchTransportService transportService = new SearchTransportService(Settings.EMPTY, null, null) { + SearchTransportService transportService = new SearchTransportService(null, null) { @Override public void sendFreeContext(Transport.Connection connection, long contextId, OriginalIndices originalIndices) { numFreedContext.incrementAndGet(); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index b109e82beef..f4cb7d224d2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.TotalHits.Relation; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AtomicArray; @@ -70,7 +69,7 @@ public class SearchPhaseControllerTests extends ESTestCase { @Before public void setup() { - searchPhaseController = new SearchPhaseController(Settings.EMPTY, + searchPhaseController = new SearchPhaseController( (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, 
b)); } diff --git a/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java b/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java index 4918939c90b..39c5920f7d9 100644 --- a/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java @@ -177,7 +177,7 @@ public class AutoCreateIndexTests extends ESTestCase { ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, clusterSettings, new IndexNameExpressionResolver(settings)); + AutoCreateIndex autoCreateIndex = new AutoCreateIndex(settings, clusterSettings, new IndexNameExpressionResolver()); assertThat(autoCreateIndex.getAutoCreate().isAutoCreateIndex(), equalTo(value)); Settings newSettings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), !value).build(); @@ -202,7 +202,7 @@ public class AutoCreateIndexTests extends ESTestCase { private AutoCreateIndex newAutoCreateIndex(Settings settings) { return new AutoCreateIndex(settings, new ClusterSettings(settings, - ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new IndexNameExpressionResolver(settings)); + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), new IndexNameExpressionResolver()); } private void expectNotMatch(ClusterState clusterState, AutoCreateIndex autoCreateIndex, String index) { diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index b34c21c885c..f9a88c777f3 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -177,10 +177,6 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { } class MyResolver extends IndexNameExpressionResolver { - MyResolver() { - super(Settings.EMPTY); - } - @Override public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index fbdd873b2c6..e638a578c6b 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -133,7 +133,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { Action(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, threadPool, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new); + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(), Request::new); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index c6938754a0c..c7420f9714a 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -101,7 +101,7 @@ public class BroadcastReplicationTests extends ESTestCase { transportService.start(); transportService.acceptIncomingRequests(); 
broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, clusterService, transportService, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), null); + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(), null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index ff868c3250a..0469fac4d7d 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -167,7 +167,7 @@ public class TransportReplicationActionTests extends ESTestCase { TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null, threadPool); + shardStateAction = new ShardStateAction(clusterService, transportService, null, null, threadPool); action = new TestAction(Settings.EMPTY, "internal:testAction", transportService, clusterService, shardStateAction, threadPool); } @@ -1114,7 +1114,7 @@ public class TransportReplicationActionTests extends ESTestCase { ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, mockIndicesService(clusterService), threadPool, shardStateAction, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(), Request::new, Request::new, ThreadPool.Names.SAME); } @@ -1123,7 +1123,7 @@ public class TransportReplicationActionTests extends ESTestCase { ThreadPool threadPool, boolean withDocumentFailureOnPrimary, 
boolean withDocumentFailureOnReplica) { super(settings, actionName, transportService, clusterService, mockIndicesService(clusterService), threadPool, shardStateAction, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(), Request::new, Request::new, ThreadPool.Names.SAME); } diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 1f1e9eb2a1e..6582634e228 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -260,7 +260,7 @@ public class TransportWriteActionTests extends ESTestCase { TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - ShardStateAction shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null, threadPool); + ShardStateAction shardStateAction = new ShardStateAction(clusterService, transportService, null, null, threadPool); TestAction action = new TestAction(Settings.EMPTY, "internal:testAction", transportService, clusterService, shardStateAction, threadPool); final String index = "test"; @@ -360,7 +360,7 @@ public class TransportWriteActionTests extends ESTestCase { super(Settings.EMPTY, "internal:test", new TransportService(Settings.EMPTY, mock(Transport.class), null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()), null, null, null, null, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), TestRequest::new, + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(), 
TestRequest::new, TestRequest::new, ThreadPool.Names.SAME); this.withDocumentFailureOnPrimary = withDocumentFailureOnPrimary; this.withDocumentFailureOnReplica = withDocumentFailureOnReplica; @@ -370,7 +370,7 @@ public class TransportWriteActionTests extends ESTestCase { ClusterService clusterService, ShardStateAction shardStateAction, ThreadPool threadPool) { super(settings, actionName, transportService, clusterService, mockIndicesService(clusterService), threadPool, shardStateAction, - new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), + new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(), TestRequest::new, TestRequest::new, ThreadPool.Names.SAME); this.withDocumentFailureOnPrimary = false; this.withDocumentFailureOnReplica = false; diff --git a/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 864987cad99..7d46c9182cd 100644 --- a/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -125,10 +125,6 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { } class MyResolver extends IndexNameExpressionResolver { - MyResolver() { - super(Settings.EMPTY); - } - @Override public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { return request.indices(); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index ff7697745da..ce29fce8921 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.update; -import org.elasticsearch.Version; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -135,9 +134,7 @@ public class UpdateRequestTests extends ESTestCase { final MockScriptEngine engine = new MockScriptEngine("mock", scripts, Collections.emptyMap()); Map engines = Collections.singletonMap(engine.getType(), engine); ScriptService scriptService = new ScriptService(baseSettings, engines, ScriptModule.CORE_CONTEXTS); - final Settings settings = settings(Version.CURRENT).build(); - - updateHelper = new UpdateHelper(settings, scriptService); + updateHelper = new UpdateHelper(scriptService); } @SuppressWarnings("unchecked") diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/server/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 61507a8363d..ae1caa787d4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -74,8 +74,8 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { private final BlockingActionFilter blockingActionFilter; - public TestPlugin(Settings settings) { - blockingActionFilter = new BlockingActionFilter(settings); + public TestPlugin() { + blockingActionFilter = new BlockingActionFilter(); } @Override @@ -87,10 +87,6 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { public static class BlockingActionFilter extends org.elasticsearch.action.support.ActionFilter.Simple { private Set blockedActions = emptySet(); - public BlockingActionFilter(Settings settings) { - super(settings); - } - @Override protected boolean apply(String action, ActionRequest request, ActionListener listener) { if (blockedActions.contains(action)) { diff 
--git a/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index af3807226a9..9734bf37b5b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -65,8 +65,7 @@ public class ClusterModuleTests extends ModuleTestCase { private ClusterService clusterService = new ClusterService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), null); static class FakeAllocationDecider extends AllocationDecider { - protected FakeAllocationDecider(Settings settings) { - super(settings); + protected FakeAllocationDecider() { } } @@ -129,7 +128,7 @@ public class ClusterModuleTests extends ModuleTestCase { Collections.singletonList(new ClusterPlugin() { @Override public Collection createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { - return Collections.singletonList(new FakeAllocationDecider(settings)); + return Collections.singletonList(new FakeAllocationDecider()); } }), clusterInfoService); assertTrue(module.deciderList.stream().anyMatch(d -> d.getClass().equals(FakeAllocationDecider.class))); diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 4e39e12b7ce..67f5871506c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.discovery.Discovery; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -89,9 +88,9 @@ public class ShardStateActionTests extends ESTestCase { private ClusterService clusterService; private static class TestShardStateAction extends ShardStateAction { - TestShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, + TestShardStateAction(ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { - super(settings, clusterService, transportService, allocationService, routingService, THREAD_POOL); + super(clusterService, transportService, allocationService, routingService, THREAD_POOL); } private Runnable onBeforeWaitForNewMasterAndRetry; @@ -130,7 +129,7 @@ public class ShardStateActionTests extends ESTestCase { TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - shardStateAction = new TestShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); + shardStateAction = new TestShardStateAction(clusterService, transportService, null, null); shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> { }); shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> { diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index 32a8c06bb70..3bf9ae9937b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -77,7 +77,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ClusterStateHealthTests extends ESTestCase { - private final 
IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); + private final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); private static ThreadPool threadPool; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/AliasValidatorTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/AliasValidatorTests.java index 05bd9eeab8c..f1440198076 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/AliasValidatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/AliasValidatorTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.test.ESTestCase; @@ -27,7 +26,7 @@ import static org.hamcrest.Matchers.startsWith; public class AliasValidatorTests extends ESTestCase { public void testValidatesAliasNames() { - AliasValidator validator = new AliasValidator(Settings.EMPTY); + AliasValidator validator = new AliasValidator(); Exception e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone(".", null)); assertEquals("Invalid alias name [.]: must not be '.' 
or '..'", e.getMessage()); e = expectThrows(InvalidAliasNameException.class, () -> validator.validateAliasStandalone("..", null)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolverTests.java index d6c8707c1d7..53266ecd4b6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ClusterNameExpressionResolverTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -29,7 +28,7 @@ import java.util.Set; public class ClusterNameExpressionResolverTests extends ESTestCase { - private ClusterNameExpressionResolver clusterNameResolver = new ClusterNameExpressionResolver(Settings.EMPTY); + private ClusterNameExpressionResolver clusterNameResolver = new ClusterNameExpressionResolver(); private static final Set remoteClusters = new HashSet<>(); static { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java index 9585381029f..518a60ffe38 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java @@ -83,7 +83,7 @@ import static org.mockito.Mockito.when; public class IndexCreationTaskTests extends ESTestCase { private final IndicesService indicesService = mock(IndicesService.class); - private final AliasValidator aliasValidator = new AliasValidator(Settings.EMPTY); + private final AliasValidator aliasValidator = new AliasValidator(); private final NamedXContentRegistry xContentRegistry = mock(NamedXContentRegistry.class); private final 
CreateIndexClusterStateUpdateRequest request = mock(CreateIndexClusterStateUpdateRequest.class); private final Logger logger = mock(Logger.class); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 2801f9abdd5..e19e2bd2993 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData.State; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; @@ -57,7 +56,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; public class IndexNameExpressionResolverTests extends ESTestCase { - private final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); + private final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); public void testIndexOptionsStrict() { MetaData.Builder mdBuilder = MetaData.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index 5ccacee395a..aa0f6834b98 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -171,8 +171,8 @@ public class MetaDataCreateIndexServiceTests 
extends ESTestCase { ClusterState clusterState = ClusterState.builder(createClusterState("source", numShards, 0, Settings.builder().put("index.blocks.write", true).build())).nodes(DiscoveryNodes.builder().add(newNode("node1"))) .build(); - AllocationService service = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + AllocationService service = new AllocationService(new AllocationDeciders( + Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); @@ -241,8 +241,8 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { ClusterState clusterState = ClusterState.builder(createClusterState("source", numShards, 0, Settings.builder().put("index.blocks.write", true).put("index.number_of_routing_shards", targetShards).build())) .nodes(DiscoveryNodes.builder().add(newNode("node1"))).build(); - AllocationService service = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + AllocationService service = new AllocationService(new AllocationDeciders( + Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); @@ -375,9 +375,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { .build(); final AllocationService service = new AllocationService( - Settings.builder().build(), - new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + new AllocationDeciders(Collections.singleton(new 
MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java index e1fbc47c4a0..9b2d58ac287 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java @@ -43,9 +43,9 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class MetaDataIndexAliasesServiceTests extends ESTestCase { - private final AliasValidator aliasValidator = new AliasValidator(Settings.EMPTY); + private final AliasValidator aliasValidator = new AliasValidator(); private final MetaDataDeleteIndexService deleteIndexService = mock(MetaDataDeleteIndexService.class); - private final MetaDataIndexAliasesService service = new MetaDataIndexAliasesService(Settings.EMPTY, null, null, aliasValidator, + private final MetaDataIndexAliasesService service = new MetaDataIndexAliasesService(null, null, aliasValidator, deleteIndexService, xContentRegistry()); public MetaDataIndexAliasesServiceTests() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java index f78f8495806..4bff7f8dc61 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -105,7 +104,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase { IndexTemplateMetaData.builder("changed_test_template").patterns(randomIndexPatterns()).build() ); - final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, null, clusterService, threadPool, + final TemplateUpgradeService service = new TemplateUpgradeService(null, clusterService, threadPool, Arrays.asList( templates -> { if (shouldAdd) { @@ -205,7 +204,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase { additions.put("add_template_" + i, new BytesArray("{\"index_patterns\" : \"*\", \"order\" : " + i + "}")); } - final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool, + final TemplateUpgradeService service = new TemplateUpgradeService(mockClient, clusterService, threadPool, Collections.emptyList()); IllegalStateException ise = expectThrows(IllegalStateException.class, () -> service.upgradeTemplates(additions, deletions)); @@ -297,7 +296,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase { return null; }).when(mockIndicesAdminClient).deleteTemplate(any(DeleteIndexTemplateRequest.class), any(ActionListener.class)); - new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool, + new TemplateUpgradeService(mockClient, clusterService, threadPool, Arrays.asList( templates -> { assertNull(templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template") diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index 7e74a35cf55..c05d0a551fe 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -537,7 +537,7 @@ public class OperationRoutingTests extends ESTestCase{ Set selectedNodes = new HashSet<>(numShards); TestThreadPool threadPool = new TestThreadPool("testThatOnlyNodesSupportNodeIds"); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - ResponseCollectorService collector = new ResponseCollectorService(Settings.EMPTY, clusterService); + ResponseCollectorService collector = new ResponseCollectorService(clusterService); Map outstandingRequests = new HashMap<>(); GroupShardsIterator groupIterator = opRouting.searchShards(state, indexNames, null, null, collector, outstandingRequests); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index ed32b022eec..1ea0a7f8501 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -574,7 +574,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { Index index = clusterState.getMetaData().index("test").getIndex(); MoveAllocationCommand command = new MoveAllocationCommand(index.getName(), 0, "node1", "node2"); - RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), + RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Collections.emptyList()), new RoutingNodes(clusterState, false), clusterState, ClusterInfo.EMPTY, System.nanoTime()); logger.info("--> executing move allocation command to non-data node"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> 
command.execute(routingAllocation, false)); @@ -612,7 +612,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { Index index = clusterState.getMetaData().index("test").getIndex(); MoveAllocationCommand command = new MoveAllocationCommand(index.getName(), 0, "node2", "node1"); - RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), + RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Collections.emptyList()), new RoutingNodes(clusterState, false), clusterState, ClusterInfo.EMPTY, System.nanoTime()); logger.info("--> executing move allocation command from non-data node"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> command.execute(routingAllocation, false)); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index acf0b260e5d..0ef64e15ce7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -264,7 +264,7 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { public void testNoRebalanceOnPrimaryOverload() { Settings.Builder settings = Settings.builder(); - AllocationService strategy = new AllocationService(settings.build(), randomAllocationDeciders(settings.build(), + AllocationService strategy = new AllocationService(randomAllocationDeciders(settings.build(), new ClusterSettings(Settings.Builder.EMPTY_SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), random()), new TestGatewayAllocator(), new ShardsAllocator() { /* @@ -403,11 +403,6 @@ public class BalanceConfigurationTests extends ESAllocationTestCase { } private class NoopGatewayAllocator extends GatewayAllocator { - - 
NoopGatewayAllocator() { - super(Settings.EMPTY); - } - @Override public void applyStartedShards(RoutingAllocation allocation, List startedShards) { // noop diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java index 405f459e99a..e4a7fa47025 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalancedSingleShardTests.java @@ -61,7 +61,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.UNASSIGNED, ShardRoutingState.RELOCATING)); ShardRouting shard = clusterState.routingTable().index("idx").shard(0).primaryShard(); MoveDecision rebalanceDecision = allocator.decideShardAllocation(shard, newRoutingAllocation( - new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), clusterState)).getMoveDecision(); + new AllocationDeciders(Collections.emptyList()), clusterState)).getMoveDecision(); assertSame(MoveDecision.NOT_TAKEN, rebalanceDecision); } @@ -70,7 +70,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { ClusterState clusterState = ClusterStateCreationUtils.state("idx", randomBoolean(), ShardRoutingState.STARTED); ShardRouting shard = clusterState.routingTable().index("idx").shard(0).primaryShard(); RoutingAllocation routingAllocation = newRoutingAllocation( - new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), clusterState); + new AllocationDeciders(Collections.emptyList()), clusterState); routingAllocation.setHasPendingAsyncFetch(); MoveDecision rebalanceDecision = allocator.decideShardAllocation(shard, routingAllocation).getMoveDecision(); assertNotNull(rebalanceDecision.getClusterRebalanceDecision()); @@ -85,7 +85,7 @@ public class BalancedSingleShardTests extends 
ESAllocationTestCase { public void testRebalancingNotAllowedDueToCanRebalance() { final Decision canRebalanceDecision = randomFrom(Decision.NO, Decision.THROTTLE); - AllocationDecider noRebalanceDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider noRebalanceDecider = new AllocationDecider() { @Override public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) { return allocation.decision(canRebalanceDecision, "TEST", "foobar"); @@ -95,7 +95,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { ClusterState clusterState = ClusterStateCreationUtils.state("idx", randomBoolean(), ShardRoutingState.STARTED); ShardRouting shard = clusterState.routingTable().index("idx").shard(0).primaryShard(); RoutingAllocation routingAllocation = newRoutingAllocation( - new AllocationDeciders(Settings.EMPTY, Collections.singleton(noRebalanceDecider)), clusterState); + new AllocationDeciders(Collections.singleton(noRebalanceDecider)), clusterState); MoveDecision rebalanceDecision = allocator.decideShardAllocation(shard, routingAllocation).getMoveDecision(); assertEquals(canRebalanceDecision.type(), rebalanceDecision.getClusterRebalanceDecision().type()); assertEquals(AllocationDecision.fromDecisionType(canRebalanceDecision.type()), rebalanceDecision.getAllocationDecision()); @@ -112,7 +112,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { } public void testRebalancePossible() { - AllocationDecider canAllocateDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider canAllocateDecider = new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return Decision.YES; @@ -127,7 +127,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { } public void testRebalancingNotAllowedDueToCanAllocate() { - AllocationDecider canAllocateDecider = new AllocationDecider(Settings.EMPTY) { + 
AllocationDecider canAllocateDecider = new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return Decision.NO; @@ -150,7 +150,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { } public void testDontBalanceShardWhenThresholdNotMet() { - AllocationDecider canAllocateDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider canAllocateDecider = new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { return Decision.YES; @@ -199,7 +199,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { } clusterState = ClusterState.builder(clusterState).nodes(nodesBuilder).build(); - AllocationDecider allocationDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider allocationDecider = new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (excludeNodes.contains(node.nodeId())) { @@ -208,7 +208,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { return Decision.YES; } }; - AllocationDecider rebalanceDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider rebalanceDecider = new AllocationDecider() { @Override public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) { return Decision.YES; @@ -216,7 +216,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { }; List allocationDeciders = Arrays.asList(rebalanceDecider, allocationDecider); RoutingAllocation routingAllocation = newRoutingAllocation( - new AllocationDeciders(Settings.EMPTY, allocationDeciders), clusterState); + new AllocationDeciders(allocationDeciders), clusterState); // allocate and get the node that is now relocating BalancedShardsAllocator allocator = new BalancedShardsAllocator(Settings.EMPTY); 
allocator.allocate(routingAllocation); @@ -229,7 +229,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { } } - routingAllocation = newRoutingAllocation(new AllocationDeciders(Settings.EMPTY, allocationDeciders), clusterState); + routingAllocation = newRoutingAllocation(new AllocationDeciders(allocationDeciders), clusterState); routingAllocation.debugDecision(true); ShardRouting shard = clusterState.getRoutingNodes().activePrimary(shardToRebalance.shardId()); MoveDecision rebalanceDecision = allocator.decideShardAllocation(shard, routingAllocation).getMoveDecision(); @@ -307,7 +307,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { if (Float.compare(-1.0f, threshold) != 0) { settings = Settings.builder().put(BalancedShardsAllocator.THRESHOLD_SETTING.getKey(), threshold).build(); } - AllocationDecider allocationDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider allocationDecider = new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (noDecisionNodes.contains(node.nodeId())) { @@ -316,7 +316,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { return Decision.YES; } }; - AllocationDecider rebalanceDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider rebalanceDecider = new AllocationDecider() { @Override public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) { return Decision.YES; @@ -324,7 +324,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { }; BalancedShardsAllocator allocator = new BalancedShardsAllocator(settings); RoutingAllocation routingAllocation = newRoutingAllocation( - new AllocationDeciders(Settings.EMPTY, Arrays.asList(allocationDecider, rebalanceDecider)), clusterState); + new AllocationDeciders(Arrays.asList(allocationDecider, rebalanceDecider)), clusterState); return 
allocator.decideShardAllocation(shardRouting, routingAllocation).getMoveDecision(); } @@ -340,7 +340,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { private Tuple setupStateAndRebalance(AllocationDecider allocationDecider, Settings balancerSettings, boolean rebalanceExpected) { - AllocationDecider rebalanceDecider = new AllocationDecider(Settings.EMPTY) { + AllocationDecider rebalanceDecider = new AllocationDecider() { @Override public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) { return Decision.YES; @@ -356,7 +356,7 @@ public class BalancedSingleShardTests extends ESAllocationTestCase { clusterState = ClusterState.builder(clusterState).nodes(nodesBuilder).build(); ShardRouting shard = clusterState.routingTable().index("idx").shard(0).primaryShard(); RoutingAllocation routingAllocation = newRoutingAllocation( - new AllocationDeciders(Settings.EMPTY, allocationDeciders), clusterState); + new AllocationDeciders(allocationDeciders), clusterState); MoveDecision rebalanceDecision = allocator.decideShardAllocation(shard, routingAllocation).getMoveDecision(); if (rebalanceExpected == false) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DecisionsImpactOnClusterHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DecisionsImpactOnClusterHealthTests.java index bee2275743b..8c643c4b1c2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DecisionsImpactOnClusterHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DecisionsImpactOnClusterHealthTests.java @@ -159,8 +159,7 @@ public class DecisionsImpactOnClusterHealthTests extends ESAllocationTestCase { } private static AllocationService newAllocationService(Settings settings, Set deciders) { - return new AllocationService(settings, - new AllocationDeciders(settings, deciders), + return new AllocationService(new 
AllocationDeciders(deciders), new TestGatewayAllocator(), new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java index 994ee8f1438..2ce0b7b89be 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java @@ -54,8 +54,8 @@ public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { @Override public void setUp() throws Exception { super.setUp(); - strategy = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + strategy = new AllocationService(new AllocationDeciders( + Collections.singleton(new MaxRetryAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); } @@ -175,7 +175,7 @@ public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { assertEquals(unassignedPrimary.unassignedInfo().getNumFailedAllocations(), i+1); assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("boom" + i)); // MaxRetryAllocationDecider#canForceAllocatePrimary should return YES decisions because canAllocate returns YES here - assertEquals(Decision.YES, new MaxRetryAllocationDecider(Settings.EMPTY).canForceAllocatePrimary( + assertEquals(Decision.YES, new MaxRetryAllocationDecider().canForceAllocatePrimary( unassignedPrimary, null, new RoutingAllocation(null, null, clusterState, null, 0))); } // now we go and check that we are actually stick to unassigned on the next failure @@ -193,7 +193,7 @@ public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { 
assertEquals(unassignedPrimary.state(), UNASSIGNED); assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("boom")); // MaxRetryAllocationDecider#canForceAllocatePrimary should return a NO decision because canAllocate returns NO here - assertEquals(Decision.NO, new MaxRetryAllocationDecider(Settings.EMPTY).canForceAllocatePrimary( + assertEquals(Decision.NO, new MaxRetryAllocationDecider().canForceAllocatePrimary( unassignedPrimary, null, new RoutingAllocation(null, null, clusterState, null, 0))); } @@ -215,7 +215,7 @@ public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { assertEquals(unassignedPrimary.state(), INITIALIZING); assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("boom")); // bumped up the max retry count, so canForceAllocatePrimary should return a YES decision - assertEquals(Decision.YES, new MaxRetryAllocationDecider(Settings.EMPTY).canForceAllocatePrimary( + assertEquals(Decision.YES, new MaxRetryAllocationDecider().canForceAllocatePrimary( routingTable.index("idx").shard(0).shards().get(0), null, new RoutingAllocation(null, null, clusterState, null, 0))); // now we start the shard @@ -242,7 +242,7 @@ public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { assertEquals(unassignedPrimary.state(), UNASSIGNED); assertThat(unassignedPrimary.unassignedInfo().getMessage(), containsString("ZOOOMG")); // Counter reset, so MaxRetryAllocationDecider#canForceAllocatePrimary should return a YES decision - assertEquals(Decision.YES, new MaxRetryAllocationDecider(Settings.EMPTY).canForceAllocatePrimary( + assertEquals(Decision.YES, new MaxRetryAllocationDecider().canForceAllocatePrimary( unassignedPrimary, null, new RoutingAllocation(null, null, clusterState, null, 0))); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 3b3504408cc..7a1c901671c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -336,9 +336,9 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().add(newNode).add(oldNode1).add(oldNode2)).build(); - AllocationDeciders allocationDeciders = new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new NodeVersionAllocationDecider(Settings.EMPTY))); - AllocationService strategy = new MockAllocationService(Settings.EMPTY, + AllocationDeciders allocationDeciders = new AllocationDeciders( + Collections.singleton(new NodeVersionAllocationDecider())); + AllocationService strategy = new MockAllocationService( allocationDeciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); state = strategy.reroute(state, new AllocationCommands(), true, false).getClusterState(); @@ -369,10 +369,10 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { new SnapshotRecoverySource(new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())), Version.CURRENT, "test")).build()) .nodes(DiscoveryNodes.builder().add(newNode).add(oldNode1).add(oldNode2)).build(); - AllocationDeciders allocationDeciders = new AllocationDeciders(Settings.EMPTY, Arrays.asList( - new ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY), - new NodeVersionAllocationDecider(Settings.EMPTY))); - AllocationService strategy = new MockAllocationService(Settings.EMPTY, + AllocationDeciders allocationDeciders = new AllocationDeciders(Arrays.asList( + new ReplicaAfterPrimaryActiveAllocationDecider(), + new 
NodeVersionAllocationDecider())); + AllocationService strategy = new MockAllocationService( allocationDeciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); state = strategy.reroute(state, new AllocationCommands(), true, false).getClusterState(); @@ -466,7 +466,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { null, 0); routingAllocation.debugDecision(true); - final NodeVersionAllocationDecider allocationDecider = new NodeVersionAllocationDecider(Settings.EMPTY); + final NodeVersionAllocationDecider allocationDecider = new NodeVersionAllocationDecider(); Decision decision = allocationDecider.canAllocate(primaryShard, newNode, routingAllocation); assertThat(decision.type(), is(Decision.Type.YES)); assertThat(decision.getExplanation(), is("the primary shard is new or already existed on the node")); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java index c083009a6b5..fe7c4a89c9f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java @@ -58,10 +58,10 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase { * balance.*/ public void testRandomDecisions() { RandomAllocationDecider randomAllocationDecider = new RandomAllocationDecider(random()); - AllocationService strategy = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, + AllocationService strategy = new AllocationService(new AllocationDeciders( new HashSet<>(Arrays.asList(new SameShardAllocationDecider(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), - new 
ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY), randomAllocationDecider))), + new ReplicaAfterPrimaryActiveAllocationDecider(), randomAllocationDecider))), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); int indices = scaledRandomIntBetween(1, 20); Builder metaBuilder = MetaData.builder(); @@ -189,7 +189,6 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase { private final Random random; public RandomAllocationDecider(Random random) { - super(Settings.EMPTY); this.random = random; } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java index eeec65f0e2e..c64bc51bd5b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java @@ -55,8 +55,8 @@ public class ResizeAllocationDeciderTests extends ESAllocationTestCase { @Override public void setUp() throws Exception { super.setUp(); - strategy = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, - Collections.singleton(new ResizeAllocationDecider(Settings.EMPTY))), + strategy = new AllocationService(new AllocationDeciders( + Collections.singleton(new ResizeAllocationDecider())), new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); } @@ -108,7 +108,7 @@ public class ResizeAllocationDeciderTests extends ESAllocationTestCase { public void testNonResizeRouting() { ClusterState clusterState = createInitialClusterState(true); - ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(Settings.EMPTY); + ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(); RoutingAllocation 
routingAllocation = new RoutingAllocation(null, null, clusterState, null, 0); ShardRouting shardRouting = TestShardRouting.newShardRouting("non-resize", 0, null, true, ShardRoutingState.UNASSIGNED); assertEquals(Decision.ALWAYS, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation)); @@ -132,7 +132,7 @@ public class ResizeAllocationDeciderTests extends ESAllocationTestCase { .metaData(metaData).build(); Index idx = clusterState.metaData().index("target").getIndex(); - ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(Settings.EMPTY); + ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(); RoutingAllocation routingAllocation = new RoutingAllocation(null, null, clusterState, null, 0); ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, 0), null, true, ShardRoutingState.UNASSIGNED, RecoverySource.LocalShardsRecoverySource.INSTANCE); @@ -160,7 +160,7 @@ public class ResizeAllocationDeciderTests extends ESAllocationTestCase { Index idx = clusterState.metaData().index("target").getIndex(); - ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(Settings.EMPTY); + ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(); RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, 0); int shardId = randomIntBetween(0, 3); int sourceShardId = IndexMetaData.selectSplitShard(shardId, clusterState.metaData().index("source"), 4).id(); @@ -200,7 +200,7 @@ public class ResizeAllocationDeciderTests extends ESAllocationTestCase { Index idx = clusterState.metaData().index("target").getIndex(); - ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(Settings.EMPTY); + ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(); RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, 
0); int shardId = randomIntBetween(0, 3); int sourceShardId = IndexMetaData.selectSplitShard(shardId, clusterState.metaData().index("source"), 4).id(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java index f059125f1ea..9856bd064ca 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java @@ -105,7 +105,7 @@ public class SameShardRoutingTests extends ESAllocationTestCase { Index index = clusterState.getMetaData().index("idx").getIndex(); ShardRouting primaryShard = clusterState.routingTable().index(index).shard(0).primaryShard(); RoutingNode routingNode = clusterState.getRoutingNodes().node(primaryShard.currentNodeId()); - RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.emptyList()), + RoutingAllocation routingAllocation = new RoutingAllocation(new AllocationDeciders(Collections.emptyList()), new RoutingNodes(clusterState, false), clusterState, ClusterInfo.EMPTY, System.nanoTime()); // can't force allocate same shard copy to the same node diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index d2e86c13d4f..24838b22d47 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -94,7 +94,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { final ClusterInfo clusterInfo = new DevNullClusterInfo(usages, usages, shardSizes); ClusterSettings 
clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, + AllocationDeciders deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider(Settings.EMPTY, clusterSettings), makeDecider(diskSettings)))); @@ -106,11 +106,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { return clusterInfo; } }; - AllocationService strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + AllocationService strategy = new AllocationService(deciders, + new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) @@ -183,16 +180,13 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.7) .build(); - deciders = new AllocationDeciders(Settings.EMPTY, + deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider(Settings.EMPTY, clusterSettings), makeDecider(diskSettings)))); - strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + 
strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); clusterState = strategy.reroute(clusterState, "reroute"); logShardStates(clusterState); @@ -214,16 +208,13 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), 0.6) .build(); - deciders = new AllocationDeciders(Settings.EMPTY, + deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider(Settings.EMPTY, clusterSettings), makeDecider(diskSettings)))); - strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); clusterState = strategy.reroute(clusterState, "reroute"); @@ -281,7 +272,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { final ClusterInfo clusterInfo = new DevNullClusterInfo(usages, usages, shardSizes); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, + AllocationDeciders deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider(Settings.EMPTY, clusterSettings), makeDecider(diskSettings)))); @@ -294,11 +285,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; - AllocationService strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - 
.put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + AllocationService strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(2)) @@ -346,11 +334,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { return clusterInfo2; } }; - strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); clusterState = strategy.reroute(clusterState, "reroute"); logShardStates(clusterState); @@ -406,16 +391,13 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "20b") .build(); - deciders = new AllocationDeciders(Settings.EMPTY, + deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider(Settings.EMPTY, clusterSettings), makeDecider(diskSettings)))); - strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - 
.put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); clusterState = strategy.reroute(clusterState, "reroute"); logShardStates(clusterState); @@ -438,16 +420,13 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "30b") .build(); - deciders = new AllocationDeciders(Settings.EMPTY, + deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider(Settings.EMPTY, clusterSettings), makeDecider(diskSettings)))); - strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); clusterState = strategy.reroute(clusterState, "reroute"); @@ -529,7 +508,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ImmutableOpenMap shardSizes = shardSizesBuilder.build(); final ClusterInfo clusterInfo = new DevNullClusterInfo(usages, usages, shardSizes); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, + AllocationDeciders deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider( Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) @@ -544,11 +523,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } 
}; - AllocationService strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + AllocationService strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) @@ -596,7 +572,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ImmutableOpenMap shardSizes = shardSizesBuilder.build(); final ClusterInfo clusterInfo = new DevNullClusterInfo(usages, usages, shardSizes); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, + AllocationDeciders deciders = new AllocationDeciders( new HashSet<>(Arrays.asList( new SameShardAllocationDecider( Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) @@ -611,11 +587,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; - AllocationService strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + AllocationService strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() 
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)) @@ -698,7 +671,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { final ClusterInfo clusterInfo = new DevNullClusterInfo(usages, usages, shardSizes); DiskThresholdDecider decider = makeDecider(diskSettings); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, + AllocationDeciders deciders = new AllocationDeciders( new HashSet<>(Arrays.asList(new SameShardAllocationDecider( Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) ), decider))); @@ -711,11 +684,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; - AllocationService strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + AllocationService strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) @@ -892,17 +862,14 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { return clusterInfo; } }; - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( + AllocationDeciders deciders = new AllocationDeciders(new HashSet<>(Arrays.asList( new SameShardAllocationDecider( Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) ), diskThresholdDecider ))); - AllocationService strategy = new AllocationService(Settings.builder() - 
.put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + AllocationService strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); // Ensure that the reroute call doesn't alter the routing table, since the first primary is relocating away // and therefor we will have sufficient disk space on node1. ClusterState result = strategy.reroute(clusterState, "reroute"); @@ -989,19 +956,15 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } }; - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( + AllocationDeciders deciders = new AllocationDeciders(new HashSet<>(Arrays.asList( new SameShardAllocationDecider( Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) ), diskThresholdDecider ))); - AllocationService strategy = new AllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", 10) - .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), "always") - - .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) - .build(), deciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), cis); + AllocationService strategy = new AllocationService(deciders, new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), cis); ClusterState result = strategy.reroute(clusterState, "reroute"); assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().state(), equalTo(STARTED)); diff --git 
a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index 4cddb747613..77967788adf 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -104,7 +104,7 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { shardSizes.put("[test][0][p]", 10L); // 10 bytes final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), ImmutableOpenMap.of()); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.singleton(decider)), + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Collections.singleton(decider)), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime()); allocation.debugDecision(true); Decision decision = decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation); @@ -159,7 +159,7 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { shardSizes.put("[test][0][p]", shardSize); ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), ImmutableOpenMap.of()); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.singleton(decider)), + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Collections.singleton(decider)), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime()); allocation.debugDecision(true); Decision decision = decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation); @@ -240,7 +240,7 @@ public class 
DiskThresholdDeciderUnitTests extends ESAllocationTestCase { final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), shardRoutingMap.build()); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.singleton(decider)), + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Collections.singleton(decider)), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime()); allocation.debugDecision(true); Decision decision = decider.canRemain(test_0, new RoutingNode("node_0", node_0), allocation); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java index 4d5639a05ea..70b728487ae 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java @@ -53,11 +53,11 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase { public void testFilterInitialRecovery() { ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); FilterAllocationDecider filterAllocationDecider = new FilterAllocationDecider(Settings.EMPTY, clusterSettings); - AllocationDeciders allocationDeciders = new AllocationDeciders(Settings.EMPTY, + AllocationDeciders allocationDeciders = new AllocationDeciders( Arrays.asList(filterAllocationDecider, new SameShardAllocationDecider(Settings.EMPTY, clusterSettings), - new ReplicaAfterPrimaryActiveAllocationDecider(Settings.EMPTY))); - AllocationService service = new AllocationService(Settings.builder().build(), allocationDeciders, + new ReplicaAfterPrimaryActiveAllocationDecider())); + 
AllocationService service = new AllocationService(allocationDeciders, new TestGatewayAllocator(), new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); ClusterState state = createInitialClusterState(service, Settings.builder().put("index.routing.allocation.initial_recovery._id", "node2").build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java index 49d69272af6..86190b107e5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RestoreInProgressAllocationDeciderTests.java @@ -38,7 +38,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; @@ -186,8 +185,8 @@ public class RestoreInProgressAllocationDeciderTests extends ESAllocationTestCas } private Decision executeAllocation(final ClusterState clusterState, final ShardRouting shardRouting) { - final AllocationDecider decider = new RestoreInProgressAllocationDecider(Settings.EMPTY); - final RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.singleton(decider)), + final AllocationDecider decider = new RestoreInProgressAllocationDecider(); + final RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Collections.singleton(decider)), clusterState.getRoutingNodes(), clusterState, null, 
0L); allocation.debugDecision(true); diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 17939f07766..9c94bfab7ac 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -41,7 +41,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class SettingsFilterTests extends ESTestCase { public void testAddingAndRemovingFilters() { HashSet hashSet = new HashSet<>(Arrays.asList("foo", "bar", "baz")); - SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY, hashSet); + SettingsFilter settingsFilter = new SettingsFilter(hashSet); assertEquals(settingsFilter.getPatterns(), hashSet); } @@ -147,7 +147,7 @@ public class SettingsFilterTests extends ESTestCase { } private void testFiltering(Settings source, Settings filtered, String... 
patterns) throws IOException { - SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY, Arrays.asList(patterns)); + SettingsFilter settingsFilter = new SettingsFilter(Arrays.asList(patterns)); // Test using direct filtering Settings filteredSettings = settingsFilter.filter(source); diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java index b45daaadfa5..8fba12197ca 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java @@ -114,7 +114,7 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { public void testUnicastHostsDoesNotExist() { final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); - final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(settings, createTempDir().toAbsolutePath()); + final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(createTempDir().toAbsolutePath()); final List addresses = provider.buildDynamicHosts((hosts, limitPortCounts) -> UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, TimeValue.timeValueSeconds(10))); @@ -148,7 +148,7 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { writer.write(String.join("\n", hostEntries)); } - return new FileBasedUnicastHostsProvider(settings, configPath).buildDynamicHosts((hosts, limitPortCounts) -> + return new FileBasedUnicastHostsProvider(configPath).buildDynamicHosts((hosts, limitPortCounts) -> UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, TimeValue.timeValueSeconds(10))); } diff --git 
a/server/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 9e57382bb4b..a3ae6b07b19 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -142,7 +142,7 @@ public class NodeJoinControllerTests extends ESTestCase { } masterService = ClusterServiceUtils.createMasterService(threadPool, initialState); nodeJoinController = new NodeJoinController(masterService, createAllocationService(Settings.EMPTY), - new ElectMasterService(Settings.EMPTY), Settings.EMPTY); + new ElectMasterService(Settings.EMPTY)); } public void testSimpleJoinAccumulation() throws InterruptedException, ExecutionException { diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java index ac1719269e7..23b118ebbed 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/PublishClusterStateActionTests.java @@ -267,7 +267,6 @@ public class PublishClusterStateActionTests extends ESTestCase { new DiscoverySettings(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables()); return new MockPublishAction( - settings, transportService, namedWriteableRegistry, listener, @@ -873,9 +872,9 @@ public class PublishClusterStateActionTests extends ESTestCase { AtomicBoolean timeoutOnCommit = new AtomicBoolean(); AtomicBoolean errorOnCommit = new AtomicBoolean(); - public MockPublishAction(Settings settings, TransportService transportService, NamedWriteableRegistry namedWriteableRegistry, + public 
MockPublishAction(TransportService transportService, NamedWriteableRegistry namedWriteableRegistry, IncomingClusterStateListener listener, DiscoverySettings discoverySettings) { - super(settings, transportService, namedWriteableRegistry, listener, discoverySettings); + super(transportService, namedWriteableRegistry, listener, discoverySettings); } @Override diff --git a/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java b/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java index 4502e32e6d3..9593b58eae9 100644 --- a/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/DanglingIndicesStateTests.java @@ -47,7 +47,7 @@ public class DanglingIndicesStateTests extends ESTestCase { public void testCleanupWhenEmpty() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); DanglingIndicesState danglingState = createDanglingIndicesState(env, metaStateService); assertTrue(danglingState.getDanglingIndices().isEmpty()); @@ -58,7 +58,7 @@ public class DanglingIndicesStateTests extends ESTestCase { } public void testDanglingIndicesDiscovery() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); DanglingIndicesState danglingState = createDanglingIndicesState(env, metaStateService); assertTrue(danglingState.getDanglingIndices().isEmpty()); @@ -76,7 +76,7 @@ public class DanglingIndicesStateTests extends ESTestCase { public void testInvalidIndexFolder() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = 
new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); DanglingIndicesState danglingState = createDanglingIndicesState(env, metaStateService); MetaData metaData = MetaData.builder().build(); @@ -100,7 +100,7 @@ public class DanglingIndicesStateTests extends ESTestCase { public void testDanglingProcessing() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); DanglingIndicesState danglingState = createDanglingIndicesState(env, metaStateService); MetaData metaData = MetaData.builder().build(); @@ -144,7 +144,7 @@ public class DanglingIndicesStateTests extends ESTestCase { public void testDanglingIndicesNotImportedWhenTombstonePresent() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); DanglingIndicesState danglingState = createDanglingIndicesState(env, metaStateService); final Settings.Builder settings = Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_INDEX_UUID, "test1UUID"); @@ -159,7 +159,6 @@ public class DanglingIndicesStateTests extends ESTestCase { } private DanglingIndicesState createDanglingIndicesState(NodeEnvironment env, MetaStateService metaStateService) { - return new DanglingIndicesState(Settings.EMPTY, env, metaStateService, null, - mock(ClusterService.class)); + return new DanglingIndicesState(env, metaStateService, null, mock(ClusterService.class)); } } diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java index 
d0bf02e3c4e..938c28fe855 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaStateServiceTests.java @@ -38,7 +38,7 @@ public class MetaStateServiceTests extends ESTestCase { public void testWriteLoadIndex() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); IndexMetaData index = IndexMetaData.builder("test1").settings(indexSettings).build(); metaStateService.writeIndex("test_write", index); @@ -48,14 +48,14 @@ public class MetaStateServiceTests extends ESTestCase { public void testLoadMissingIndex() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); assertThat(metaStateService.loadIndexState(new Index("test1", "test1UUID")), nullValue()); } } public void testWriteLoadGlobal() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); MetaData metaData = MetaData.builder() .persistentSettings(Settings.builder().put("test1", "value1").build()) @@ -67,7 +67,7 @@ public class MetaStateServiceTests extends ESTestCase { public void testWriteGlobalStateWithIndexAndNoIndexIsLoaded() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); MetaData metaData = MetaData.builder() 
.persistentSettings(Settings.builder().put("test1", "value1").build()) @@ -83,7 +83,7 @@ public class MetaStateServiceTests extends ESTestCase { public void testLoadGlobal() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { - MetaStateService metaStateService = new MetaStateService(Settings.EMPTY, env, xContentRegistry()); + MetaStateService metaStateService = new MetaStateService(env, xContentRegistry()); IndexMetaData index = IndexMetaData.builder("test1").settings(indexSettings).build(); MetaData metaData = MetaData.builder() diff --git a/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index ac35d8d136e..e6fc0c535df 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -214,7 +214,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testForceAllocatePrimary() { testAllocator.addData(node1, "allocId1", randomBoolean()); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, Arrays.asList( + AllocationDeciders deciders = new AllocationDeciders(Arrays.asList( // since the deciders return a NO decision for allocating a shard (due to the guaranteed NO decision from the second decider), // the allocator will see if it can force assign the primary, where the decision will be YES new TestAllocateDecision(randomBoolean() ? 
Decision.YES : Decision.NO), getNoDeciderThatAllowsForceAllocate() @@ -235,7 +235,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { public void testDontAllocateOnNoOrThrottleForceAllocationDecision() { testAllocator.addData(node1, "allocId1", randomBoolean()); boolean forceDecisionNo = randomBoolean(); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, Arrays.asList( + AllocationDeciders deciders = new AllocationDeciders(Arrays.asList( // since both deciders here return a NO decision for allocating a shard, // the allocator will see if it can force assign the primary, where the decision will be either NO or THROTTLE, // so the shard will remain un-initialized @@ -258,7 +258,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testDontForceAllocateOnThrottleDecision() { testAllocator.addData(node1, "allocId1", randomBoolean()); - AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, Arrays.asList( + AllocationDeciders deciders = new AllocationDeciders(Arrays.asList( // since we have a NO decision for allocating a shard (because the second decider returns a NO decision), // the allocator will see if it can force assign the primary, and in this case, // the TestAllocateDecision's decision for force allocating is to THROTTLE (using @@ -467,10 +467,6 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { private Map data; - TestAllocator() { - super(Settings.EMPTY); - } - public TestAllocator clear() { data = null; return this; diff --git a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index a63a76e7154..d30f7eafce4 100644 --- a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -217,11 +217,11 @@ public class 
ReplicaShardAllocatorTests extends ESAllocationTestCase { * to wait till throttling on it is done. */ public void testThrottleWhenAllocatingToMatchingNode() { - RoutingAllocation allocation = onePrimaryOnNode1And1Replica(new AllocationDeciders(Settings.EMPTY, + RoutingAllocation allocation = onePrimaryOnNode1And1Replica(new AllocationDeciders( Arrays.asList(new TestAllocateDecision(Decision.YES), new SameShardAllocationDecider( Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), - new AllocationDecider(Settings.EMPTY) { + new AllocationDecider() { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (node.node().equals(node2)) { @@ -356,10 +356,6 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { private Map data = null; private AtomicBoolean fetchDataCalled = new AtomicBoolean(false); - TestAllocator() { - super(Settings.EMPTY); - } - public void clean() { data = null; } diff --git a/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java b/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java index 0888dfd3c40..21439e3aa40 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java @@ -60,7 +60,7 @@ public class GlobalCheckpointSyncActionTests extends ESTestCase { TransportService.NOOP_TRANSPORT_INTERCEPTOR, boundAddress -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null, threadPool); + shardStateAction = new ShardStateAction(clusterService, transportService, null, null, threadPool); } public void tearDown() throws Exception { @@ -110,7 +110,7 @@ public 
class GlobalCheckpointSyncActionTests extends ESTestCase { threadPool, shardStateAction, new ActionFilters(Collections.emptySet()), - new IndexNameExpressionResolver(Settings.EMPTY)); + new IndexNameExpressionResolver()); final GlobalCheckpointSyncAction.Request primaryRequest = new GlobalCheckpointSyncAction.Request(indexShard.shardId()); if (randomBoolean()) { action.shardOperationOnPrimary(primaryRequest, indexShard); diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 28e625b34df..62f6a7b2345 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -68,7 +68,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { assertThat(parentTask, instanceOf(PrimaryReplicaSyncer.ResyncTask.class)); listener.onResponse(new ResyncReplicationResponse()); }; - PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, taskManager, syncAction); + PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(taskManager, syncAction); syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 10))); int numDocs = randomInt(10); @@ -136,7 +136,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase { syncActionCalled.set(true); threadPool.generic().execute(() -> listener.onResponse(new ResyncReplicationResponse())); }; - PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, + PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer( new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), syncAction); syncer.setChunkSize(new ByteSizeValue(1)); // every document is sent off separately diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index 
8268d4ea5e0..797d2e51fa6 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -127,19 +127,17 @@ public class ClusterStateChanges extends AbstractComponent { private final NodeJoinController.JoinTaskExecutor joinTaskExecutor; public ClusterStateChanges(NamedXContentRegistry xContentRegistry, ThreadPool threadPool) { - super(SETTINGS); - ClusterSettings clusterSettings = new ClusterSettings(SETTINGS, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - allocationService = new AllocationService(SETTINGS, new AllocationDeciders(SETTINGS, + allocationService = new AllocationService(new AllocationDeciders( new HashSet<>(Arrays.asList(new SameShardAllocationDecider(SETTINGS, clusterSettings), - new ReplicaAfterPrimaryActiveAllocationDecider(SETTINGS), + new ReplicaAfterPrimaryActiveAllocationDecider(), new RandomAllocationDeciderTests.RandomAllocationDecider(getRandom())))), new TestGatewayAllocator(), new BalancedShardsAllocator(SETTINGS), EmptyClusterInfoService.INSTANCE); shardFailedClusterStateTaskExecutor = new ShardStateAction.ShardFailedClusterStateTaskExecutor(allocationService, null, logger); shardStartedClusterStateTaskExecutor = new ShardStateAction.ShardStartedClusterStateTaskExecutor(allocationService, logger); ActionFilters actionFilters = new ActionFilters(Collections.emptySet()); - IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(SETTINGS); + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(); DestructiveOperations destructiveOperations = new DestructiveOperations(SETTINGS, clusterSettings); Environment environment = TestEnvironment.newEnvironment(SETTINGS); Transport transport = mock(Transport.class); // it's not used @@ -180,13 +178,13 @@ public class ClusterStateChanges extends AbstractComponent { return indexMetaData; } }; - MetaDataIndexStateService 
indexStateService = new MetaDataIndexStateService(SETTINGS, clusterService, allocationService, + MetaDataIndexStateService indexStateService = new MetaDataIndexStateService(clusterService, allocationService, metaDataIndexUpgradeService, indicesService, threadPool); MetaDataDeleteIndexService deleteIndexService = new MetaDataDeleteIndexService(SETTINGS, clusterService, allocationService); - MetaDataUpdateSettingsService metaDataUpdateSettingsService = new MetaDataUpdateSettingsService(SETTINGS, clusterService, + MetaDataUpdateSettingsService metaDataUpdateSettingsService = new MetaDataUpdateSettingsService(clusterService, allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService, threadPool); MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService(SETTINGS, clusterService, indicesService, - allocationService, new AliasValidator(SETTINGS), environment, + allocationService, new AliasValidator(), environment, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, threadPool, xContentRegistry, true); transportCloseIndexAction = new TransportCloseIndexAction(SETTINGS, transportService, clusterService, threadPool, diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 39091ce04ec..4625aa04be3 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -462,7 +462,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice final ClusterService clusterService = mock(ClusterService.class); final RepositoriesService repositoriesService = new RepositoriesService(settings, clusterService, transportService, null, threadPool); - final PeerRecoveryTargetService 
recoveryTargetService = new PeerRecoveryTargetService(settings, threadPool, + final PeerRecoveryTargetService recoveryTargetService = new PeerRecoveryTargetService(threadPool, transportService, null, clusterService); final ShardStateAction shardStateAction = mock(ShardStateAction.class); final PrimaryReplicaSyncer primaryReplicaSyncer = mock(PrimaryReplicaSyncer.class); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java index 524795bfa24..72eb2baeca9 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoverySourceServiceTests.java @@ -37,7 +37,7 @@ public class PeerRecoverySourceServiceTests extends IndexShardTestCase { public void testDuplicateRecoveries() throws IOException { IndexShard primary = newStartedShard(true); - PeerRecoverySourceService peerRecoverySourceService = new PeerRecoverySourceService(Settings.EMPTY, + PeerRecoverySourceService peerRecoverySourceService = new PeerRecoverySourceService( mock(TransportService.class), mock(IndicesService.class), new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))); StartRecoveryRequest startRecoveryRequest = new StartRecoveryRequest(primary.shardId(), randomAlphaOfLength(10), diff --git a/server/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java index 234524f16f4..873ebf17f23 100644 --- a/server/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/fs/FsProbeTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.monitor.fs; import org.apache.lucene.util.Constants; import org.elasticsearch.common.collect.Tuple; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeEnvironment.NodePath; import org.elasticsearch.test.ESTestCase; @@ -51,7 +50,7 @@ public class FsProbeTests extends ESTestCase { public void testFsInfo() throws IOException { try (NodeEnvironment env = newNodeEnvironment()) { - FsProbe probe = new FsProbe(Settings.EMPTY, env); + FsProbe probe = new FsProbe(env); FsInfo stats = probe.stats(null, null); assertNotNull(stats); @@ -166,7 +165,7 @@ public class FsProbeTests extends ESTestCase { " 253 1 dm-1 112 0 4624 13 0 0 0 0 0 5 13", " 253 2 dm-2 47802 0 710658 49312 1371977 0 64126096 33730596 0 1058193 33781827")); - final FsProbe probe = new FsProbe(Settings.EMPTY, null) { + final FsProbe probe = new FsProbe(null) { @Override List readProcDiskStats() throws IOException { return diskStats.get(); diff --git a/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java b/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java index 8aa0f3ec5ba..5fedfa7869e 100644 --- a/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/node/ResponseCollectorServiceTests.java @@ -53,7 +53,7 @@ public class ResponseCollectorServiceTests extends ESTestCase { clusterService = new ClusterService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadpool); - collector = new ResponseCollectorService(Settings.EMPTY, clusterService); + collector = new ResponseCollectorService(clusterService); } @After diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java index f13a35613d5..3eec748808e 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java +++ 
b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksClusterServiceTests.java @@ -640,8 +640,8 @@ public class PersistentTasksClusterServiceTests extends ESTestCase { /** Creates a PersistentTasksClusterService with a single PersistentTasksExecutor implemented by a BiFunction **/ private

    PersistentTasksClusterService createService(final BiFunction fn) { - PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Settings.EMPTY, - singleton(new PersistentTasksExecutor

    (Settings.EMPTY, TestPersistentTasksExecutor.NAME, null) { + PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry( + singleton(new PersistentTasksExecutor

    (TestPersistentTasksExecutor.NAME, null) { @Override public Assignment getAssignment(P params, ClusterState clusterState) { return fn.apply(params, clusterState); diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksDecidersTestCase.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksDecidersTestCase.java index 655a21a5f53..7f2dada7c4c 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksDecidersTestCase.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksDecidersTestCase.java @@ -58,10 +58,10 @@ public abstract class PersistentTasksDecidersTestCase extends ESTestCase { public void setUp() throws Exception { super.setUp(); clusterService = createClusterService(threadPool); - PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(clusterService.getSettings(), emptyList()) { + PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(emptyList()) { @Override public PersistentTasksExecutor getPersistentTaskExecutorSafe(String taskName) { - return new PersistentTasksExecutor(clusterService.getSettings(), taskName, null) { + return new PersistentTasksExecutor(taskName, null) { @Override protected void nodeOperation(AllocatedPersistentTask task, Params params, PersistentTaskState state) { logger.debug("Executing task {}", task); diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java index 50bcf594926..8aa553639cc 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java @@ -106,10 +106,10 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { when(action.createTask(anyLong(), anyString(), anyString(), eq(parentId), any(), any())).thenReturn( new 
TestPersistentTasksPlugin.TestTask(i, "persistent", "test", "", parentId, Collections.emptyMap())); } - PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Settings.EMPTY, Collections.singletonList(action)); + PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Collections.singletonList(action)); MockExecutor executor = new MockExecutor(); - PersistentTasksNodeService coordinator = new PersistentTasksNodeService(Settings.EMPTY, persistentTasksService, + PersistentTasksNodeService coordinator = new PersistentTasksNodeService(persistentTasksService, registry, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), executor); ClusterState state = createInitialClusterState(nonLocalNodesCount, Settings.EMPTY); @@ -202,10 +202,10 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { AllocatedPersistentTask nodeTask = new TestPersistentTasksPlugin.TestTask(0, "persistent", "test", "", parentId, Collections.emptyMap()); when(action.createTask(anyLong(), anyString(), anyString(), eq(parentId), any(), any())).thenReturn(nodeTask); - PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Settings.EMPTY, Collections.singletonList(action)); + PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Collections.singletonList(action)); MockExecutor executor = new MockExecutor(); - PersistentTasksNodeService coordinator = new PersistentTasksNodeService(Settings.EMPTY, persistentTasksService, + PersistentTasksNodeService coordinator = new PersistentTasksNodeService(persistentTasksService, registry, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), executor); ClusterState state = createInitialClusterState(1, Settings.EMPTY); @@ -231,7 +231,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { public void testTaskCancellation() { AtomicLong capturedTaskId = new AtomicLong(); AtomicReference> capturedListener = new 
AtomicReference<>(); - PersistentTasksService persistentTasksService = new PersistentTasksService(Settings.EMPTY, null, null, null) { + PersistentTasksService persistentTasksService = new PersistentTasksService(null, null, null) { @Override void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { capturedTaskId.set(taskId); @@ -250,12 +250,12 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { when(action.createTask(anyLong(), anyString(), anyString(), any(), any(), any())) .thenReturn(new TestPersistentTasksPlugin.TestTask(1, "persistent", "test", "", new TaskId("cluster", 1), Collections.emptyMap())); - PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Settings.EMPTY, Collections.singletonList(action)); + PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(Collections.singletonList(action)); int nonLocalNodesCount = randomInt(10); MockExecutor executor = new MockExecutor(); TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); - PersistentTasksNodeService coordinator = new PersistentTasksNodeService(Settings.EMPTY, persistentTasksService, + PersistentTasksNodeService coordinator = new PersistentTasksNodeService(persistentTasksService, registry, taskManager, executor); ClusterState state = createInitialClusterState(nonLocalNodesCount, Settings.EMPTY); diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index 745b8836569..2e99b83378b 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -91,7 +91,7 @@ public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin, P @Override public List> getPersistentTasksExecutor(ClusterService clusterService, 
ThreadPool threadPool, Client client) { - return Collections.singletonList(new TestPersistentTasksExecutor(Settings.EMPTY, clusterService)); + return Collections.singletonList(new TestPersistentTasksExecutor(clusterService)); } @Override @@ -292,8 +292,8 @@ public class TestPersistentTasksPlugin extends Plugin implements ActionPlugin, P public static final String NAME = "cluster:admin/persistent/test"; private final ClusterService clusterService; - public TestPersistentTasksExecutor(Settings settings, ClusterService clusterService) { - super(settings, NAME, ThreadPool.Names.GENERIC); + public TestPersistentTasksExecutor(ClusterService clusterService) { + super(NAME, ThreadPool.Names.GENERIC); this.clusterService = clusterService; } diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index d35a8b5d249..8b723f144c1 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -37,7 +37,6 @@ import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; -import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -85,12 +84,12 @@ public class RestControllerTests extends ESTestCase { .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false) .build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - usageService = new UsageService(settings); + usageService = new UsageService(); // we can do this here only because we know that we don't adjust breaker settings dynamically in the test inFlightRequestsBreaker = 
circuitBreakerService.getBreaker(CircuitBreaker.IN_FLIGHT_REQUESTS); HttpServerTransport httpServerTransport = new TestHttpServerTransport(); - restController = new RestController(settings, Collections.emptySet(), null, null, circuitBreakerService, usageService); + restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); restController.registerHandler(RestRequest.Method.GET, "/", (request, channel, client) -> channel.sendResponse( new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY))); @@ -104,7 +103,7 @@ public class RestControllerTests extends ESTestCase { public void testApplyRelevantHeaders() throws Exception { final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Set headers = new HashSet<>(Arrays.asList("header.1", "header.2")); - final RestController restController = new RestController(Settings.EMPTY, headers, null, null, circuitBreakerService, usageService); + final RestController restController = new RestController(headers, null, null, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("true")); restHeaders.put("header.2", Collections.singletonList("true")); @@ -137,8 +136,7 @@ public class RestControllerTests extends ESTestCase { } public void testCanTripCircuitBreaker() throws Exception { - RestController controller = new RestController(Settings.EMPTY, Collections.emptySet(), null, null, circuitBreakerService, - usageService); + RestController controller = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); // trip circuit breaker by default controller.registerHandler(RestRequest.Method.GET, "/trip", new FakeRestHandler(true)); controller.registerHandler(RestRequest.Method.GET, "/do-not-trip", new FakeRestHandler(false)); @@ -210,8 +208,8 @@ public class RestControllerTests extends ESTestCase { assertSame(handler, h); return 
(RestRequest request, RestChannel channel, NodeClient client) -> wrapperCalled.set(true); }; - final RestController restController = new RestController(Settings.EMPTY, Collections.emptySet(), wrapper, null, - circuitBreakerService, usageService); + final RestController restController = new RestController(Collections.emptySet(), wrapper, null, + circuitBreakerService, usageService); restController.dispatchRequest(new FakeRestRequest.Builder(xContentRegistry()).build(), null, null, Optional.of(handler)); assertTrue(wrapperCalled.get()); assertFalse(handlerCalled.get()); @@ -291,9 +289,7 @@ public class RestControllerTests extends ESTestCase { String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, null); AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); - restController = new RestController( - Settings.builder().put(HttpTransportSettings.SETTING_HTTP_CONTENT_TYPE_REQUIRED.getKey(), true).build(), - Collections.emptySet(), null, null, circuitBreakerService, usageService); + restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); restController.registerHandler(RestRequest.Method.GET, "/", (r, c, client) -> c.sendResponse( new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY))); diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java index e5e8bce6d6d..6a4a8749397 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java @@ -87,8 +87,8 @@ public class RestHttpResponseHeadersTests extends ESTestCase { new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); final Settings 
settings = Settings.EMPTY; - UsageService usageService = new UsageService(settings); - RestController restController = new RestController(settings, Collections.emptySet(), + UsageService usageService = new UsageService(); + RestController restController = new RestController(Collections.emptySet(), null, null, circuitBreakerService, usageService); // A basic RestHandler handles requests to the endpoint diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsActionTests.java index 29b19739e75..79d04e28161 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsActionTests.java @@ -52,7 +52,7 @@ public class RestClusterGetSettingsActionTests extends ESTestCase { final Settings settings = Settings.builder().put("foo.filtered", "bar").put("foo.non_filtered", "baz").build(); md.accept(mdBuilder, settings); final ClusterState.Builder builder = new ClusterState.Builder(ClusterState.EMPTY_STATE).metaData(mdBuilder); - final SettingsFilter filter = new SettingsFilter(Settings.EMPTY, Collections.singleton("foo.filtered")); + final SettingsFilter filter = new SettingsFilter(Collections.singleton("foo.filtered")); final Setting.Property[] properties = {Setting.Property.Dynamic, Setting.Property.Filtered, Setting.Property.NodeScope}; final Set> settingsSet = Stream.concat( ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java index 640b97605af..330dee49f9f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java @@ -44,9 +44,9 @@ public class RestNodesStatsActionTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - UsageService usageService = new UsageService(Settings.EMPTY); + UsageService usageService = new UsageService(); action = new RestNodesStatsAction(Settings.EMPTY, - new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null, usageService)); + new RestController(Collections.emptySet(), null, null, null, usageService)); } public void testUnrecognizedMetric() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java index 26c1e1fa177..1eda721f53b 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java @@ -42,9 +42,9 @@ public class RestIndicesStatsActionTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - UsageService usageService = new UsageService(Settings.EMPTY); + UsageService usageService = new UsageService(); action = new RestIndicesStatsAction(Settings.EMPTY, - new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null, usageService)); + new RestController(Collections.emptySet(), null, null, null, usageService)); } public void testUnrecognizedMetric() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 980a2c2e34e..9a9ecef5aad 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -55,8 +55,8 @@ public class RestValidateQueryActionTests extends AbstractSearchTestCase { private static ThreadPool threadPool = new TestThreadPool(RestValidateQueryActionTests.class.getName()); private static NodeClient client = new NodeClient(Settings.EMPTY, threadPool); - private static UsageService usageService = new UsageService(Settings.EMPTY); - private static RestController controller = new RestController(Settings.EMPTY, emptySet(), null, client, null, usageService); + private static UsageService usageService = new UsageService(); + private static RestController controller = new RestController(emptySet(), null, client, null, usageService); private static RestValidateQueryAction action = new RestValidateQueryAction(Settings.EMPTY, controller); /** diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java index 4535bf7a91b..13e94f7fe53 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -75,9 +75,9 @@ public class RestIndicesActionTests extends ESTestCase { public void testBuildTable() { final Settings settings = Settings.EMPTY; - UsageService usageService = new UsageService(settings); - final RestController restController = new RestController(settings, Collections.emptySet(), null, null, null, usageService); - final RestIndicesAction action = new RestIndicesAction(settings, restController, new IndexNameExpressionResolver(settings)); + UsageService usageService = new UsageService(); + final RestController restController = new RestController(Collections.emptySet(), null, null, null, usageService); + final RestIndicesAction action = new RestIndicesAction(settings, restController, new 
IndexNameExpressionResolver()); // build a (semi-)random table final int numIndices = randomIntBetween(0, 5); diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java index 32993a6b7c7..bf3d40af5e0 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestNodesActionTests.java @@ -47,9 +47,9 @@ public class RestNodesActionTests extends ESTestCase { @Before public void setUpAction() { - UsageService usageService = new UsageService(Settings.EMPTY); + UsageService usageService = new UsageService(); action = new RestNodesAction(Settings.EMPTY, - new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null, usageService)); + new RestController(Collections.emptySet(), null, null, null, usageService)); } public void testBuildTableDoesNotThrowGivenNullNodeInfoAndStats() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index e99fb4cc1f2..25f04532ac8 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -51,8 +51,8 @@ public class RestRecoveryActionTests extends ESTestCase { public void testRestRecoveryAction() { final Settings settings = Settings.EMPTY; - UsageService usageService = new UsageService(settings); - final RestController restController = new RestController(settings, Collections.emptySet(), null, null, null, usageService); + UsageService usageService = new UsageService(); + final RestController restController = new RestController(Collections.emptySet(), null, null, null, usageService); final RestRecoveryAction action = new RestRecoveryAction(settings, 
restController); final int totalShards = randomIntBetween(1, 32); final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java index 368f3043579..d47d9006b7f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java @@ -199,7 +199,7 @@ public class MedianAbsoluteDeviationAggregatorTests extends AggregatorTestCase { } public static IsCloseToRelative closeToRelative(double expected) { - return closeToRelative(expected, 0.05); + return closeToRelative(expected, 0.1); } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 7f008d8721a..e67b981bf81 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -977,7 +977,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest final String repo = "repo"; final AdminClient admin = client().admin(); - final IndexNameExpressionResolver nameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); + final IndexNameExpressionResolver nameExpressionResolver = new IndexNameExpressionResolver(); final String snapshotName = ""; logger.info("--> creating repository"); diff --git a/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java b/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java index c6347b014bf..c38030afbe9 100644 --- 
a/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java +++ b/server/src/test/java/org/elasticsearch/usage/UsageServiceTests.java @@ -52,7 +52,7 @@ public class UsageServiceTests extends ESTestCase { BaseRestHandler handlerD = new MockRestHandler("d", settings); BaseRestHandler handlerE = new MockRestHandler("e", settings); BaseRestHandler handlerF = new MockRestHandler("f", settings); - UsageService usageService = new UsageService(settings); + UsageService usageService = new UsageService(); usageService.addRestHandler(handlerA); usageService.addRestHandler(handlerB); usageService.addRestHandler(handlerC); diff --git a/settings.gradle b/settings.gradle index dedf3520bbb..c5acf583000 100644 --- a/settings.gradle +++ b/settings.gradle @@ -24,10 +24,10 @@ List projects = [ 'distribution:packages:deb', 'distribution:packages:oss-rpm', 'distribution:packages:rpm', - 'distribution:bwc:next-minor-snapshot', - 'distribution:bwc:staged-minor-snapshot', - 'distribution:bwc:next-bugfix-snapshot', - 'distribution:bwc:maintenance-bugfix-snapshot', + 'distribution:bwc:bugfix', + 'distribution:bwc:maintenance', + 'distribution:bwc:minor', + 'distribution:bwc:staged', 'distribution:tools:java-version-checker', 'distribution:tools:launchers', 'distribution:tools:plugin-cli', diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java index e1205ba846b..b3681247b4e 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java @@ -70,19 +70,19 @@ public abstract class ESAllocationTestCase extends ESTestCase { } public static MockAllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings, Random random) { - return new MockAllocationService(settings, + return new MockAllocationService( 
randomAllocationDeciders(settings, clusterSettings, random), new TestGatewayAllocator(), new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE); } public static MockAllocationService createAllocationService(Settings settings, ClusterInfoService clusterInfoService) { - return new MockAllocationService(settings, + return new MockAllocationService( randomAllocationDeciders(settings, EMPTY_CLUSTER_SETTINGS, random()), new TestGatewayAllocator(), new BalancedShardsAllocator(settings), clusterInfoService); } public static MockAllocationService createAllocationService(Settings settings, GatewayAllocator gatewayAllocator) { - return new MockAllocationService(settings, + return new MockAllocationService( randomAllocationDeciders(settings, EMPTY_CLUSTER_SETTINGS, random()), gatewayAllocator, new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE); } @@ -91,7 +91,7 @@ public abstract class ESAllocationTestCase extends ESTestCase { List deciders = new ArrayList<>( ClusterModule.createAllocationDeciders(settings, clusterSettings, Collections.emptyList())); Collections.shuffle(deciders, random); - return new AllocationDeciders(settings, deciders); + return new AllocationDeciders(deciders); } protected static Set MASTER_DATA_ROLES = @@ -127,18 +127,18 @@ public abstract class ESAllocationTestCase extends ESTestCase { } protected static AllocationDeciders yesAllocationDeciders() { - return new AllocationDeciders(Settings.EMPTY, Arrays.asList( + return new AllocationDeciders(Arrays.asList( new TestAllocateDecision(Decision.YES), new SameShardAllocationDecider(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)))); } protected static AllocationDeciders noAllocationDeciders() { - return new AllocationDeciders(Settings.EMPTY, Collections.singleton(new TestAllocateDecision(Decision.NO))); + return new AllocationDeciders(Collections.singleton(new TestAllocateDecision(Decision.NO))); } protected static 
AllocationDeciders throttleAllocationDeciders() { - return new AllocationDeciders(Settings.EMPTY, Arrays.asList( + return new AllocationDeciders(Arrays.asList( new TestAllocateDecision(Decision.THROTTLE), new SameShardAllocationDecider(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)))); @@ -159,7 +159,6 @@ public abstract class ESAllocationTestCase extends ESTestCase { private final Decision decision; public TestAllocateDecision(Decision decision) { - super(Settings.EMPTY); this.decision = decision; } @@ -184,9 +183,9 @@ public abstract class ESAllocationTestCase extends ESTestCase { private volatile long nanoTimeOverride = -1L; - public MockAllocationService(Settings settings, AllocationDeciders allocationDeciders, GatewayAllocator gatewayAllocator, + public MockAllocationService(AllocationDeciders allocationDeciders, GatewayAllocator gatewayAllocator, ShardsAllocator shardsAllocator, ClusterInfoService clusterInfoService) { - super(settings, allocationDeciders, gatewayAllocator, shardsAllocator, clusterInfoService); + super(allocationDeciders, gatewayAllocator, shardsAllocator, clusterInfoService); } public void setNanoTimeOverride(long nanoTime) { @@ -203,10 +202,7 @@ public abstract class ESAllocationTestCase extends ESTestCase { * Mocks behavior in ReplicaShardAllocator to remove delayed shards from list of unassigned shards so they don't get reassigned yet. 
*/ protected static class DelayedShardsMockGatewayAllocator extends GatewayAllocator { - - public DelayedShardsMockGatewayAllocator() { - super(Settings.EMPTY); - } + public DelayedShardsMockGatewayAllocator() {} @Override public void applyStartedShards(RoutingAllocation allocation, List startedShards) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/alias/RandomAliasActionsGenerator.java b/test/framework/src/main/java/org/elasticsearch/index/alias/RandomAliasActionsGenerator.java index d4d2d78789b..e8a554ca4aa 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/alias/RandomAliasActionsGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/index/alias/RandomAliasActionsGenerator.java @@ -82,6 +82,9 @@ public final class RandomAliasActionsGenerator { action.indexRouting(randomRouting().toString()); } } + if (randomBoolean()) { + action.writeIndex(randomBoolean()); + } } return action; } diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 60a7655e9ed..c396cdfe845 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -167,7 +167,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase boolean closed = false; private ReplicationTargets replicationTargets; - private final PrimaryReplicaSyncer primaryReplicaSyncer = new PrimaryReplicaSyncer(Settings.EMPTY, + private final PrimaryReplicaSyncer primaryReplicaSyncer = new PrimaryReplicaSyncer( new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()), (request, parentTask, primaryAllocationId, primaryTerm, listener) -> { try { diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 854592aa3fc..f5c9b50d7b8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -230,7 +230,7 @@ public abstract class ESTestCase extends LuceneTestCase { // filter out joda timezones that are deprecated for the java time migration List jodaTZIds = DateTimeZone.getAvailableIDs().stream() - .filter(DateUtils.DEPRECATED_SHORT_TZ_IDS::contains).sorted().collect(Collectors.toList()); + .filter(s -> DateUtils.DEPRECATED_SHORT_TZ_IDS.contains(s) == false).sorted().collect(Collectors.toList()); JODA_TIMEZONE_IDS = Collections.unmodifiableList(jodaTZIds); List javaTZIds = Arrays.asList(TimeZone.getAvailableIDs()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index de4226bf275..c8d380cf095 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1562,6 +1562,7 @@ public final class InternalTestCluster extends TestCluster { // if we're adding too many master-eligible nodes at once, we can't update the min master setting before adding the nodes. 
updateMinMasterNodes(currentMasters + newMasters); } + rebuildUnicastHostFiles(nodeAndClients); // ensure that new nodes can find the existing nodes when they start List> futures = nodeAndClients.stream().map(node -> executor.submit(node::startNode)).collect(Collectors.toList()); try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java index ae62acff9b7..37db06a15e6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java @@ -21,7 +21,6 @@ package org.elasticsearch.test.discovery; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.zen.PingContextProvider; @@ -45,8 +44,7 @@ public final class MockZenPing extends AbstractComponent implements ZenPing { private final PingContextProvider contextProvider; - public MockZenPing(Settings settings, PingContextProvider contextProvider) { - super(settings); + public MockZenPing(PingContextProvider contextProvider) { this.contextProvider = contextProvider; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java index 2c8305b4e12..a42ee370ece 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java @@ -99,7 +99,7 @@ public class TestZenDiscovery extends ZenDiscovery { protected ZenPing newZenPing(Settings settings, ThreadPool 
threadPool, TransportService transportService, UnicastHostsProvider hostsProvider) { if (USE_MOCK_PINGS.get(settings)) { - return new MockZenPing(settings, this); + return new MockZenPing(this); } else { return super.newZenPing(settings, threadPool, transportService, hostsProvider); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java b/test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java index d3e05d36f6e..9966bfb47fa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java +++ b/test/framework/src/main/java/org/elasticsearch/test/gateway/NoopGatewayAllocator.java @@ -22,7 +22,6 @@ package org.elasticsearch.test.gateway; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.FailedShard; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayAllocator; import java.util.List; @@ -34,10 +33,6 @@ public class NoopGatewayAllocator extends GatewayAllocator { public static final NoopGatewayAllocator INSTANCE = new NoopGatewayAllocator(); - protected NoopGatewayAllocator() { - super(Settings.EMPTY); - } - @Override public void applyStartedShards(RoutingAllocation allocation, List startedShards) { // noop diff --git a/test/framework/src/main/java/org/elasticsearch/test/gateway/TestGatewayAllocator.java b/test/framework/src/main/java/org/elasticsearch/test/gateway/TestGatewayAllocator.java index 2bbf2ce4c2c..bcbe52e32cd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/gateway/TestGatewayAllocator.java +++ b/test/framework/src/main/java/org/elasticsearch/test/gateway/TestGatewayAllocator.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.FailedShard; 
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.PrimaryShardAllocator; @@ -60,7 +59,7 @@ public class TestGatewayAllocator extends GatewayAllocator { Map> knownAllocations = new HashMap<>(); DiscoveryNodes currentNodes = DiscoveryNodes.EMPTY_NODES; - PrimaryShardAllocator primaryShardAllocator = new PrimaryShardAllocator(Settings.EMPTY) { + PrimaryShardAllocator primaryShardAllocator = new PrimaryShardAllocator() { @Override protected AsyncShardFetch.FetchResult fetchData(ShardRouting shard, RoutingAllocation allocation) { // for now always return immediately what we know @@ -81,7 +80,7 @@ public class TestGatewayAllocator extends GatewayAllocator { } }; - ReplicaShardAllocator replicaShardAllocator = new ReplicaShardAllocator(Settings.EMPTY) { + ReplicaShardAllocator replicaShardAllocator = new ReplicaShardAllocator() { @Override protected AsyncShardFetch.FetchResult fetchData(ShardRouting shard, RoutingAllocation allocation) { // for now, just pretend no node has data @@ -95,10 +94,6 @@ public class TestGatewayAllocator extends GatewayAllocator { } }; - public TestGatewayAllocator() { - super(Settings.EMPTY); - } - @Override public void applyStartedShards(RoutingAllocation allocation, List startedShards) { currentNodes = allocation.nodes(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index d6c4f30a885..3fc4d030da0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.transport; import 
com.carrotsearch.randomizedtesting.SysGlobals; +import java.util.concurrent.TimeUnit; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterModule; @@ -599,21 +600,21 @@ public final class MockTransportService extends TransportService { Transport.Connection connection = super.openConnection(node, profile); synchronized (openConnections) { - List connections = openConnections.computeIfAbsent(node, - (n) -> new CopyOnWriteArrayList<>()); - connections.add(connection); - } - - connection.addCloseListener(ActionListener.wrap(() -> { - synchronized (openConnections) { - List connections = openConnections.get(node); - boolean remove = connections.remove(connection); - assert remove : "Should have removed connection"; - if (connections.isEmpty()) { - openConnections.remove(node); + openConnections.computeIfAbsent(node, n -> new CopyOnWriteArrayList<>()).add(connection); + connection.addCloseListener(ActionListener.wrap(() -> { + synchronized (openConnections) { + List connections = openConnections.get(node); + boolean remove = connections.remove(connection); + assert remove : "Should have removed connection"; + if (connections.isEmpty()) { + openConnections.remove(node); + } + if (openConnections.isEmpty()) { + openConnections.notifyAll(); + } } - } - })); + })); + } return connection; } @@ -621,8 +622,15 @@ public final class MockTransportService extends TransportService { @Override protected void doClose() throws IOException { super.doClose(); - synchronized (openConnections) { - assert openConnections.size() == 0 : "still open connections: " + openConnections; + try { + synchronized (openConnections) { + if (openConnections.isEmpty() == false) { + openConnections.wait(TimeUnit.SECONDS.toMillis(30L)); + } + assert openConnections.size() == 0 : "still open connections: " + openConnections; + } + } catch (InterruptedException e) { + throw new IllegalStateException(e); } } diff --git 
a/x-pack/docs/en/rest-api/watcher/stats.asciidoc b/x-pack/docs/en/rest-api/watcher/stats.asciidoc index 38f8ede925e..3f875485ba0 100644 --- a/x-pack/docs/en/rest-api/watcher/stats.asciidoc +++ b/x-pack/docs/en/rest-api/watcher/stats.asciidoc @@ -25,7 +25,7 @@ currently being executed by {watcher}. Additional information is shared per watch that is currently executing. This information includes the `watch_id`, the time its execution started and its current execution phase. -To include this metric, the `metric` option should be set to `executing_watches` +To include this metric, the `metric` option should be set to `current_watches` or `_all`. In addition you can also specify the `emit_stacktraces=true` parameter, which adds stack traces for each watch that is being executed. These stack traces can give you more insight into an execution of a watch. @@ -51,7 +51,7 @@ To include this metric, the `metric` option should include `queued_watches` or `metric`:: (enum) Defines which additional metrics are included in the response. - `executing_watches`::: Includes the current executing watches in the response. + `current_watches`::: Includes the current executing watches in the response. `queued_watches`::: Includes the watches queued for execution in the response. `_all`::: Includes all metrics in the response. 
@@ -98,7 +98,7 @@ and will include the basic metrics and metrics about the current executing watch [source,js] -------------------------------------------------- -GET _xpack/watcher/stats?metric=executing_watches +GET _xpack/watcher/stats?metric=current_watches -------------------------------------------------- // CONSOLE diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 884af81b67b..085d58bce83 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -150,7 +150,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E @Override public List> getPersistentTasksExecutor(ClusterService clusterService, ThreadPool threadPool, Client client) { - return Collections.singletonList(new ShardFollowTasksExecutor(settings, client, threadPool, clusterService)); + return Collections.singletonList(new ShardFollowTasksExecutor(client, threadPool, clusterService)); } public List> getActions() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 88d07566c74..446e3aaee41 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.XContentType; @@ -58,8 +57,8 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor components = new ArrayList<>(); // just create the reloader as it will pull all of the loaded ssl configurations and start watching them - new SSLConfigurationReloader(settings, environment, getSslService(), resourceWatcherService); + new SSLConfigurationReloader(environment, getSslService(), resourceWatcherService); setLicenseService(new LicenseService(settings, clusterService, getClock(), environment, resourceWatcherService, getLicenseState())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocationRoutedStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocationRoutedStep.java index be7c7799bd0..5e4ba927074 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocationRoutedStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/AllocationRoutedStep.java @@ -37,7 +37,7 @@ public class AllocationRoutedStep extends ClusterStateWaitStep { private static final Logger logger = LogManager.getLogger(AllocationRoutedStep.class); - private static final AllocationDeciders ALLOCATION_DECIDERS = new AllocationDeciders(Settings.EMPTY, Collections.singletonList( + private static final AllocationDeciders ALLOCATION_DECIDERS = new AllocationDeciders(Collections.singletonList( new FilterAllocationDecider(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)))); private boolean waitOnAllShardCopies; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetSingleNodeAllocateStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetSingleNodeAllocateStep.java index 55b5e6e5053..602ca6cbeb7 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetSingleNodeAllocateStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexlifecycle/SetSingleNodeAllocateStep.java @@ -30,7 +30,7 @@ import java.util.Optional; public class SetSingleNodeAllocateStep extends AsyncActionStep { public static final String NAME = "set-single-node-allocation"; - private static final AllocationDeciders ALLOCATION_DECIDERS = new AllocationDeciders(Settings.EMPTY, Collections.singletonList( + private static final AllocationDeciders ALLOCATION_DECIDERS = new AllocationDeciders(Collections.singletonList( new FilterAllocationDecider(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)))); public SetSingleNodeAllocateStep(StepKey key, StepKey nextStepKey, Client client) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 9cad992327e..b5aac1e6225 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -422,9 +422,9 @@ public class DatafeedConfig extends AbstractDiffable implements public static class Builder { + public static final int DEFAULT_AGGREGATION_CHUNKING_BUCKETS = 1000; private static final TimeValue MIN_DEFAULT_QUERY_DELAY = TimeValue.timeValueMinutes(1); private static final TimeValue MAX_DEFAULT_QUERY_DELAY = TimeValue.timeValueMinutes(2); - private static final int DEFAULT_AGGREGATION_CHUNKING_BUCKETS = 1000; private String id; private String jobId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java index 
35999c9752f..f0ba07ad15c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtils.java @@ -139,7 +139,7 @@ public final class ExtractorUtils { } } - static long validateAndGetCalendarInterval(String calendarInterval) { + public static long validateAndGetCalendarInterval(String calendarInterval) { TimeValue interval; DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(calendarInterval); if (dateTimeUnit != null) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java index 3980282321c..595df0f8c31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java @@ -25,7 +25,11 @@ public class RollupSearchAction extends Action { return new SearchResponse(); } - static class RequestBuilder extends ActionRequestBuilder { + public static class RequestBuilder extends ActionRequestBuilder { + public RequestBuilder(ElasticsearchClient client, SearchRequest searchRequest) { + super(client, INSTANCE, searchRequest); + } + RequestBuilder(ElasticsearchClient client) { super(client, INSTANCE, new SearchRequest()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloader.java index 2217513c03f..4f8d7372b0c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloader.java @@ -6,7 +6,6 @@ 
package org.elasticsearch.xpack.core.ssl; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.watcher.FileChangesListener; import org.elasticsearch.watcher.FileWatcher; @@ -35,8 +34,7 @@ public class SSLConfigurationReloader extends AbstractComponent { private final ResourceWatcherService resourceWatcherService; private final SSLService sslService; - public SSLConfigurationReloader(Settings settings, Environment env, SSLService sslService, ResourceWatcherService resourceWatcher) { - super(settings); + public SSLConfigurationReloader(Environment env, SSLService sslService, ResourceWatcherService resourceWatcher) { this.environment = env; this.resourceWatcherService = resourceWatcher; this.sslService = sslService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index e22e09e5979..00a0193e460 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -88,7 +88,6 @@ public class SSLService extends AbstractComponent { * for use later */ public SSLService(Settings settings, Environment environment) { - super(settings); this.settings = settings; this.env = environment; this.globalSSLConfiguration = new SSLConfiguration(settings.getByPrefix(XPackSettings.GLOBAL_SSL_PREFIX)); @@ -98,7 +97,6 @@ public class SSLService extends AbstractComponent { private SSLService(Settings settings, Environment environment, SSLConfiguration globalSSLConfiguration, Map sslConfigurations, Map sslContexts) { - super(settings); this.settings = settings; this.env = environment; this.globalSSLConfiguration = globalSSLConfiguration; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java index a25e79ffdf6..212c6d9c1c0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/crypto/CryptoService.java @@ -68,7 +68,6 @@ public class CryptoService extends AbstractComponent { private final SecretKey encryptionKey; public CryptoService(Settings settings) throws IOException { - super(settings); this.encryptionAlgorithm = ENCRYPTION_ALGO_SETTING.get(settings); final int keyLength = ENCRYPTION_KEY_LENGTH_SETTING.get(settings); this.ivLength = keyLength / 8; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java index 160641be469..5267e5dc2ff 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoActionResponseTests.java @@ -80,7 +80,7 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa List nodeStats = Collections.singletonList(new NodeStats(discoveryNode, 0L, null, null, null, null, null, null, null, null, null, null, null, null, null)); - IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(Settings.EMPTY); + IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(); IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); boolean clusterIssueFound = randomBoolean(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index d8e0b693f70..2290a347528 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -307,7 +307,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); - new SSLConfigurationReloader(settings, env, sslService, resourceWatcherService) { + new SSLConfigurationReloader(env, sslService, resourceWatcherService) { @Override void reloadSSLContext(SSLConfiguration configuration) { fail("reload should not be called! [keystore reload exception]"); @@ -348,7 +348,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); - new SSLConfigurationReloader(settings, env, sslService, resourceWatcherService) { + new SSLConfigurationReloader(env, sslService, resourceWatcherService) { @Override void reloadSSLContext(SSLConfiguration configuration) { fail("reload should not be called! [pem key reload exception]"); @@ -383,7 +383,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { Environment env = randomBoolean() ? 
null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); - new SSLConfigurationReloader(settings, env, sslService, resourceWatcherService) { + new SSLConfigurationReloader(env, sslService, resourceWatcherService) { @Override void reloadSSLContext(SSLConfiguration configuration) { fail("reload should not be called! [truststore reload exception]"); @@ -415,7 +415,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); - new SSLConfigurationReloader(settings, env, sslService, resourceWatcherService) { + new SSLConfigurationReloader(env, sslService, resourceWatcherService) { @Override void reloadSSLContext(SSLConfiguration configuration) { fail("reload should not be called! 
[pem trust reload exception]"); @@ -444,7 +444,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase { final CountDownLatch reloadLatch = new CountDownLatch(1); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); - new SSLConfigurationReloader(settings, env, sslService, resourceWatcherService) { + new SSLConfigurationReloader(env, sslService, resourceWatcherService) { @Override void reloadSSLContext(SSLConfiguration configuration) { super.reloadSSLContext(configuration); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 5772d0be428..9825189ba64 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; import org.elasticsearch.xpack.core.ml.notifications.AuditorField; +import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.ml.MachineLearning; import org.junit.After; import org.junit.Before; @@ -27,6 +28,7 @@ import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Locale; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; @@ -63,6 +65,16 @@ public class DatafeedJobsRestIT extends ESRestTestCase { client().performRequest(request); } + private void 
setupFullAccessRole(String index) throws IOException { + Request request = new Request("PUT", "/_xpack/security/role/test_data_access"); + request.setJsonEntity("{" + + " \"indices\" : [" + + " { \"names\": [\"" + index + "\"], \"privileges\": [\"all\"] }" + + " ]" + + "}"); + client().performRequest(request); + } + private void setupUser(String user, List roles) throws IOException { String password = new String(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING.getChars()); @@ -359,7 +371,75 @@ public class DatafeedJobsRestIT extends ESRestTestCase { assertThat(e.getMessage(), containsString("Cannot create datafeed")); assertThat(e.getMessage(), - containsString("user ml_admin lacks permissions on the indices to be searched")); + containsString("user ml_admin lacks permissions on the indices")); + } + + public void testInsufficientSearchPrivilegesOnPutWithRollup() throws Exception { + setupDataAccessRole("airline-data-aggs-rollup"); + String jobId = "privs-put-job-rollup"; + Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); + createJobRequest.setJsonEntity("{\n" + + " \"description\": \"Aggs job\",\n" + + " \"analysis_config\": {\n" + + " \"bucket_span\": \"1h\",\n" + + " \"summary_count_field_name\": \"doc_count\",\n" + + " \"detectors\": [\n" + + " {\n" + + " \"function\": \"mean\",\n" + + " \"field_name\": \"responsetime\",\n" + + " \"by_field_name\": \"airline\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"data_description\": {\"time_field\": \"time stamp\"}\n" + + "}"); + client().performRequest(createJobRequest); + + String rollupJobId = "rollup-" + jobId; + Request createRollupRequest = new Request("PUT", "/_xpack/rollup/job/" + rollupJobId); + createRollupRequest.setJsonEntity("{\n" + + "\"index_pattern\": \"airline-data-aggs\",\n" + + " \"rollup_index\": \"airline-data-aggs-rollup\",\n" + + " \"cron\": \"*/30 * * * * ?\",\n" + + " \"page_size\" :1000,\n" + + " \"groups\" : {\n" + + " 
\"date_histogram\": {\n" + + " \"field\": \"time stamp\",\n" + + " \"interval\": \"2m\",\n" + + " \"delay\": \"7d\"\n" + + " },\n" + + " \"terms\": {\n" + + " \"fields\": [\"airline\"]\n" + + " }" + + " },\n" + + " \"metrics\": [\n" + + " {\n" + + " \"field\": \"responsetime\",\n" + + " \"metrics\": [\"avg\",\"min\",\"max\",\"sum\"]\n" + + " },\n" + + " {\n" + + " \"field\": \"time stamp\",\n" + + " \"metrics\": [\"min\",\"max\"]\n" + + " }\n" + + " ]\n" + + "}"); + client().performRequest(createRollupRequest); + + String datafeedId = "datafeed-" + jobId; + String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":3600000}," + + "\"aggregations\":{" + + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + + "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}"; + + + ResponseException e = expectThrows(ResponseException.class, () -> + new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup", "doc") + .setAggregations(aggregations) + .setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS) //want to search, but no admin access + .build()); + assertThat(e.getMessage(), containsString("Cannot create datafeed")); + assertThat(e.getMessage(), + containsString("user ml_admin_plus_data lacks permissions on the indices")); } public void testInsufficientSearchPrivilegesOnPreview() throws Exception { @@ -615,7 +695,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase { // There should be a notification saying that there was a problem extracting data client().performRequest(new Request("POST", "/_refresh")); Response notificationsResponse = client().performRequest( - new Request("GET", AuditorField.NOTIFICATIONS_INDEX + "/_search?q=job_id:" + jobId)); + new Request("GET", AuditorField.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId)); String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity()); assertThat(notificationsResponseAsString, 
containsString("\"message\":\"Datafeed is encountering errors extracting data: " + "action [indices:data/read/search] is unauthorized for user [ml_admin_plus_data]\"")); @@ -663,6 +743,171 @@ public class DatafeedJobsRestIT extends ESRestTestCase { assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0")); } + public void testLookbackOnlyGivenAggregationsWithHistogramAndRollupIndex() throws Exception { + String jobId = "aggs-histogram-rollup-job"; + Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); + createJobRequest.setJsonEntity("{\n" + + " \"description\": \"Aggs job\",\n" + + " \"analysis_config\": {\n" + + " \"bucket_span\": \"1h\",\n" + + " \"summary_count_field_name\": \"doc_count\",\n" + + " \"detectors\": [\n" + + " {\n" + + " \"function\": \"mean\",\n" + + " \"field_name\": \"responsetime\",\n" + + " \"by_field_name\": \"airline\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"data_description\": {\"time_field\": \"time stamp\"}\n" + + "}"); + client().performRequest(createJobRequest); + + String rollupJobId = "rollup-" + jobId; + Request createRollupRequest = new Request("PUT", "/_xpack/rollup/job/" + rollupJobId); + createRollupRequest.setJsonEntity("{\n" + + "\"index_pattern\": \"airline-data-aggs\",\n" + + " \"rollup_index\": \"airline-data-aggs-rollup\",\n" + + " \"cron\": \"*/30 * * * * ?\",\n" + + " \"page_size\" :1000,\n" + + " \"groups\" : {\n" + + " \"date_histogram\": {\n" + + " \"field\": \"time stamp\",\n" + + " \"interval\": \"2m\",\n" + + " \"delay\": \"7d\"\n" + + " },\n" + + " \"terms\": {\n" + + " \"fields\": [\"airline\"]\n" + + " }" + + " },\n" + + " \"metrics\": [\n" + + " {\n" + + " \"field\": \"responsetime\",\n" + + " \"metrics\": [\"avg\",\"min\",\"max\",\"sum\"]\n" + + " },\n" + + " {\n" + + " \"field\": \"time stamp\",\n" + + " \"metrics\": [\"min\",\"max\"]\n" + + " }\n" + + " ]\n" + + "}"); + client().performRequest(createRollupRequest); + 
client().performRequest(new Request("POST", "/_xpack/rollup/job/" + rollupJobId + "/_start")); + + assertBusy(() -> { + Response getRollup = client().performRequest(new Request("GET", "/_xpack/rollup/job/" + rollupJobId)); + String body = EntityUtils.toString(getRollup.getEntity()); + assertThat(body, containsString("\"job_state\":\"started\"")); + assertThat(body, containsString("\"rollups_indexed\":4")); + }, 60, TimeUnit.SECONDS); + + client().performRequest(new Request("POST", "/_xpack/rollup/job/" + rollupJobId + "/_stop")); + assertBusy(() -> { + Response getRollup = client().performRequest(new Request("GET", "/_xpack/rollup/job/" + rollupJobId)); + assertThat(EntityUtils.toString(getRollup.getEntity()), containsString("\"job_state\":\"stopped\"")); + }, 60, TimeUnit.SECONDS); + + final Request refreshRollupIndex = new Request("POST", "airline-data-aggs-rollup/_refresh"); + client().performRequest(refreshRollupIndex); + + String datafeedId = "datafeed-" + jobId; + String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":3600000}," + + "\"aggregations\":{" + + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + + "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}"; + new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup", "response").setAggregations(aggregations).build(); + openJob(client(), jobId); + + startDatafeedAndWaitUntilStopped(datafeedId); + waitUntilJobIsClosed(jobId); + Response jobStatsResponse = client().performRequest(new Request("GET", + MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); + String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity()); + assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2")); + assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":2")); + } + + public void testLookbackWithoutPermissionsAndRollup() throws Exception { + 
setupFullAccessRole("airline-data-aggs-rollup"); + String jobId = "rollup-permission-test-network-job"; + Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId); + createJobRequest.setJsonEntity("{\n" + + " \"description\": \"Aggs job\",\n" + + " \"analysis_config\": {\n" + + " \"bucket_span\": \"1h\",\n" + + " \"summary_count_field_name\": \"doc_count\",\n" + + " \"detectors\": [\n" + + " {\n" + + " \"function\": \"mean\",\n" + + " \"field_name\": \"responsetime\",\n" + + " \"by_field_name\": \"airline\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"data_description\": {\"time_field\": \"time stamp\"}\n" + + "}"); + client().performRequest(createJobRequest); + + String rollupJobId = "rollup-" + jobId; + Request createRollupRequest = new Request("PUT", "/_xpack/rollup/job/" + rollupJobId); + createRollupRequest.setJsonEntity("{\n" + + "\"index_pattern\": \"airline-data-aggs\",\n" + + " \"rollup_index\": \"airline-data-aggs-rollup\",\n" + + " \"cron\": \"*/30 * * * * ?\",\n" + + " \"page_size\" :1000,\n" + + " \"groups\" : {\n" + + " \"date_histogram\": {\n" + + " \"field\": \"time stamp\",\n" + + " \"interval\": \"2m\",\n" + + " \"delay\": \"7d\"\n" + + " },\n" + + " \"terms\": {\n" + + " \"fields\": [\"airline\"]\n" + + " }" + + " },\n" + + " \"metrics\": [\n" + + " {\n" + + " \"field\": \"responsetime\",\n" + + " \"metrics\": [\"avg\",\"min\",\"max\",\"sum\"]\n" + + " },\n" + + " {\n" + + " \"field\": \"time stamp\",\n" + + " \"metrics\": [\"min\",\"max\"]\n" + + " }\n" + + " ]\n" + + "}"); + client().performRequest(createRollupRequest); + + String datafeedId = "datafeed-" + jobId; + String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":3600000}," + + "\"aggregations\":{" + + "\"time stamp\":{\"max\":{\"field\":\"time stamp\"}}," + + "\"responsetime\":{\"avg\":{\"field\":\"responsetime\"}}}}}"; + + + // At the time we create the datafeed the user can access the network-data 
index that we have access to + new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs-rollup", "doc") + .setAggregations(aggregations) + .setChunkingTimespan("300s") + .setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS) + .build(); + + // Change the role so that the user can no longer access network-data + setupFullAccessRole("some-other-data"); + + openJob(client(), jobId); + + startDatafeedAndWaitUntilStopped(datafeedId, BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS); + waitUntilJobIsClosed(jobId); + // There should be a notification saying that there was a problem extracting data + client().performRequest(new Request("POST", "/_refresh")); + Response notificationsResponse = client().performRequest( + new Request("GET", AuditorField.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId)); + String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity()); + assertThat(notificationsResponseAsString, containsString("\"message\":\"Datafeed is encountering errors extracting data: " + + "action [indices:admin/xpack/rollup/search] is unauthorized for user [ml_admin_plus_data]\"")); + } + public void testRealtime() throws Exception { String jobId = "job-realtime-1"; createJob(jobId, "airline"); @@ -882,7 +1127,8 @@ public class DatafeedJobsRestIT extends ESRestTestCase { @After public void clearMlState() throws Exception { new MlRestTestStateCleaner(logger, adminClient()).clearMlMetadata(); - ESRestTestCase.waitForPendingTasks(adminClient()); + // Don't check rollup jobs because we clear them in the superclass. 
+ waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(RollupJob.NAME)); } private static class DatafeedBuilder { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/InvalidLicenseEnforcer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/InvalidLicenseEnforcer.java index 7c9f163a225..21e06362a70 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/InvalidLicenseEnforcer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/InvalidLicenseEnforcer.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; @@ -20,9 +19,8 @@ public class InvalidLicenseEnforcer extends AbstractComponent { private final DatafeedManager datafeedManager; private final AutodetectProcessManager autodetectProcessManager; - InvalidLicenseEnforcer(Settings settings, XPackLicenseState licenseState, ThreadPool threadPool, + InvalidLicenseEnforcer(XPackLicenseState licenseState, ThreadPool threadPool, DatafeedManager datafeedManager, AutodetectProcessManager autodetectProcessManager) { - super(settings); this.threadPool = threadPool; this.licenseState = licenseState; this.datafeedManager = datafeedManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 738f5a9e1a4..086754054b4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -365,11 +365,11 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu Auditor auditor = new Auditor(client, 
clusterService.getNodeName()); JobResultsProvider jobResultsProvider = new JobResultsProvider(client, settings); - UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(settings, client, clusterService, threadPool); + UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(client, clusterService, threadPool); JobManager jobManager = new JobManager(env, settings, jobResultsProvider, clusterService, auditor, client, notifier); - JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(settings, client); - JobResultsPersister jobResultsPersister = new JobResultsPersister(settings, client); + JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(client); + JobResultsPersister jobResultsPersister = new JobResultsPersister(client); AutodetectProcessFactory autodetectProcessFactory; NormalizerProcessFactory normalizerProcessFactory; @@ -412,7 +412,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu autodetectProcessManager); // This object's constructor attaches to the license state, so there's no need to retain another reference to it - new InvalidLicenseEnforcer(settings, getLicenseState(), threadPool, datafeedManager, autodetectProcessManager); + new InvalidLicenseEnforcer(getLicenseState(), threadPool, datafeedManager, autodetectProcessManager); // run node startup tasks autodetectProcessManager.onNodeStartup(); @@ -422,11 +422,11 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu jobResultsProvider, jobManager, autodetectProcessManager, - new MlInitializationService(settings, threadPool, clusterService, client), + new MlInitializationService(threadPool, clusterService, client), jobDataCountsPersister, datafeedManager, auditor, - new MlAssignmentNotifier(settings, auditor, clusterService) + new MlAssignmentNotifier(auditor, clusterService) ); } @@ -438,7 +438,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu 
return Arrays.asList( new TransportOpenJobAction.OpenJobPersistentTasksExecutor(settings, clusterService, autodetectProcessManager.get()), - new TransportStartDatafeedAction.StartDatafeedPersistentTasksExecutor(settings, datafeedManager.get()) + new TransportStartDatafeedAction.StartDatafeedPersistentTasksExecutor(datafeedManager.get()) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java index 37d714d1777..1bd4157ed48 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlAssignmentNotifier.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.LocalNodeMasterListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; @@ -32,8 +31,7 @@ public class MlAssignmentNotifier extends AbstractComponent implements ClusterSt private final AtomicBoolean enabled = new AtomicBoolean(false); - MlAssignmentNotifier(Settings settings, Auditor auditor, ClusterService clusterService) { - super(settings); + MlAssignmentNotifier(Auditor auditor, ClusterService clusterService) { this.auditor = auditor; this.clusterService = clusterService; clusterService.addLocalNodeMasterListener(this); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index c96a12ffa10..016fcd5e928 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.LifecycleListener; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.threadpool.ThreadPool; @@ -23,8 +22,7 @@ class MlInitializationService extends AbstractComponent implements ClusterStateL private volatile MlDailyMaintenanceService mlDailyMaintenanceService; - MlInitializationService(Settings settings, ThreadPool threadPool, ClusterService clusterService, Client client) { - super(settings); + MlInitializationService(ThreadPool threadPool, ClusterService clusterService, Client client) { this.threadPool = threadPool; this.clusterService = clusterService; this.client = client; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java index efc0517900e..26153455696 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java @@ -28,7 +28,6 @@ public class MlLifeCycleService extends AbstractComponent { public MlLifeCycleService(Environment environment, ClusterService clusterService, DatafeedManager datafeedManager, AutodetectProcessManager autodetectProcessManager) { - super(environment.settings()); this.environment = environment; this.datafeedManager = datafeedManager; this.autodetectProcessManager = autodetectProcessManager; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 
f3f48933602..9f92d41edbc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -690,7 +690,7 @@ public class TransportOpenJobAction extends TransportMasterNodeAction { private final XPackLicenseState licenseState; @@ -78,23 +84,48 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction privResponseListener = ActionListener.wrap( - r -> handlePrivsResponse(username, request, r, listener), - listener::onFailure); - HasPrivilegesRequest privRequest = new HasPrivilegesRequest(); + final String[] indices = request.getDatafeed().getIndices().toArray(new String[0]); + + final String username = securityContext.getUser().principal(); + final HasPrivilegesRequest privRequest = new HasPrivilegesRequest(); + privRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); privRequest.username(username); privRequest.clusterPrivileges(Strings.EMPTY_ARRAY); - // We just check for permission to use the search action. In reality we'll also - // use the scroll action, but that's considered an implementation detail. 
- privRequest.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() - .indices(request.getDatafeed().getIndices().toArray(new String[0])) - .privileges(SearchAction.NAME) - .build()); - privRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); - client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); + final RoleDescriptor.IndicesPrivileges.Builder indicesPrivilegesBuilder = RoleDescriptor.IndicesPrivileges.builder() + .indices(indices); + + ActionListener privResponseListener = ActionListener.wrap( + r -> handlePrivsResponse(username, request, r, listener), + listener::onFailure); + + ActionListener getRollupIndexCapsActionHandler = ActionListener.wrap( + response -> { + if (response.getJobs().isEmpty()) { // This means no rollup indexes are in the config + indicesPrivilegesBuilder.privileges(SearchAction.NAME); + } else { + indicesPrivilegesBuilder.privileges(SearchAction.NAME, RollupSearchAction.NAME); + } + privRequest.indexPrivileges(indicesPrivilegesBuilder.build()); + client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); + }, + e -> { + if (e instanceof IndexNotFoundException) { + indicesPrivilegesBuilder.privileges(SearchAction.NAME); + privRequest.indexPrivileges(indicesPrivilegesBuilder.build()); + client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); + } else { + listener.onFailure(e); + } + } + ); + + executeAsyncWithOrigin(client, + ML_ORIGIN, + GetRollupIndexCapsAction.INSTANCE, + new GetRollupIndexCapsAction.Request(indices), + getRollupIndexCapsActionHandler); } else { putDatafeed(request, threadPool.getThreadContext().getHeaders(), listener); } @@ -115,8 +146,7 @@ public class TransportPutDatafeedAction extends TransportMasterNodeAction currentTimeSupplier, Auditor auditor) { - super(Settings.EMPTY); this.client = Objects.requireNonNull(client); this.clusterService = Objects.requireNonNull(clusterService); this.threadPool = threadPool; 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java index 8fd1ced1729..77e2c695db7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java @@ -5,14 +5,19 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationDataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.chunked.ChunkedDataExtractorFactory; +import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.RollupDataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.scroll.ScrollDataExtractorFactory; -import org.elasticsearch.xpack.core.ml.job.config.Job; public interface DataExtractorFactory { DataExtractor newExtractor(long start, long end); @@ -22,16 +27,44 @@ public interface DataExtractorFactory { */ static void create(Client client, DatafeedConfig datafeed, Job job, ActionListener listener) { ActionListener factoryHandler = ActionListener.wrap( - factory -> listener.onResponse(datafeed.getChunkingConfig().isEnabled() - ? 
new ChunkedDataExtractorFactory(client, datafeed, job, factory) : factory) - , listener::onFailure + factory -> listener.onResponse(datafeed.getChunkingConfig().isEnabled() + ? new ChunkedDataExtractorFactory(client, datafeed, job, factory) : factory) + , listener::onFailure ); - boolean isScrollSearch = datafeed.hasAggregations() == false; - if (isScrollSearch) { - ScrollDataExtractorFactory.create(client, datafeed, job, factoryHandler); - } else { - factoryHandler.onResponse(new AggregationDataExtractorFactory(client, datafeed, job)); - } + ActionListener getRollupIndexCapsActionHandler = ActionListener.wrap( + response -> { + if (response.getJobs().isEmpty()) { // This means no rollup indexes are in the config + if (datafeed.hasAggregations()) { + factoryHandler.onResponse(new AggregationDataExtractorFactory(client, datafeed, job)); + } else { + ScrollDataExtractorFactory.create(client, datafeed, job, factoryHandler); + } + } else { + if (datafeed.hasAggregations()) { // Rollup indexes require aggregations + RollupDataExtractorFactory.create(client, datafeed, job, response.getJobs(), factoryHandler); + } else { + listener.onFailure(new IllegalArgumentException("Aggregations are required when using Rollup indices")); + } + } + }, + e -> { + if (e instanceof IndexNotFoundException) { + listener.onFailure(new ResourceNotFoundException("datafeed [" + datafeed.getId() + + "] cannot retrieve data because index " + ((IndexNotFoundException)e).getIndex() + " does not exist")); + } else { + listener.onFailure(e); + } + } + ); + + GetRollupIndexCapsAction.Request request = new GetRollupIndexCapsAction.Request(datafeed.getIndices().toArray(new String[0])); + + ClientHelper.executeAsyncWithOrigin( + client, + ClientHelper.ML_ORIGIN, + GetRollupIndexCapsAction.INSTANCE, + request, + getRollupIndexCapsActionHandler); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java new file mode 100644 index 00000000000..df858f45c82 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + * Abstract class for aggregated data extractors, e.g. 
{@link RollupDataExtractor} + * + * @param The request builder type for getting data from ElasticSearch + */ +abstract class AbstractAggregationDataExtractor> + implements DataExtractor { + + private static final Logger LOGGER = LogManager.getLogger(AbstractAggregationDataExtractor.class); + + /** + * The number of key-value pairs written in each batch to process. + * This has to be a number that is small enough to allow for responsive + * cancelling and big enough to not cause overhead by calling the + * post data action too often. The value of 1000 was determined via + * such testing. + */ + private static int BATCH_KEY_VALUE_PAIRS = 1000; + + protected final Client client; + protected final AggregationDataExtractorContext context; + private boolean hasNext; + private boolean isCancelled; + private AggregationToJsonProcessor aggregationToJsonProcessor; + private ByteArrayOutputStream outputStream; + + AbstractAggregationDataExtractor(Client client, AggregationDataExtractorContext dataExtractorContext) { + this.client = Objects.requireNonNull(client); + context = Objects.requireNonNull(dataExtractorContext); + hasNext = true; + isCancelled = false; + outputStream = new ByteArrayOutputStream(); + } + + @Override + public boolean hasNext() { + return hasNext; + } + + @Override + public boolean isCancelled() { + return isCancelled; + } + + @Override + public void cancel() { + LOGGER.debug("[{}] Data extractor received cancel request", context.jobId); + isCancelled = true; + hasNext = false; + } + + @Override + public Optional next() throws IOException { + if (!hasNext()) { + throw new NoSuchElementException(); + } + + if (aggregationToJsonProcessor == null) { + Aggregations aggs = search(); + if (aggs == null) { + hasNext = false; + return Optional.empty(); + } + initAggregationProcessor(aggs); + } + + return Optional.ofNullable(processNextBatch()); + } + + private Aggregations search() throws IOException { + LOGGER.debug("[{}] Executing aggregated search", 
context.jobId); + SearchResponse searchResponse = executeSearchRequest(buildSearchRequest(buildBaseSearchSource())); + LOGGER.debug("[{}] Search response was obtained", context.jobId); + ExtractorUtils.checkSearchWasSuccessful(context.jobId, searchResponse); + return validateAggs(searchResponse.getAggregations()); + } + + private void initAggregationProcessor(Aggregations aggs) throws IOException { + aggregationToJsonProcessor = new AggregationToJsonProcessor(context.timeField, context.fields, context.includeDocCount, + context.start); + aggregationToJsonProcessor.process(aggs); + } + + protected SearchResponse executeSearchRequest(T searchRequestBuilder) { + return ClientHelper.executeWithHeaders(context.headers, ClientHelper.ML_ORIGIN, client, searchRequestBuilder::get); + } + + private SearchSourceBuilder buildBaseSearchSource() { + // For derivative aggregations the first bucket will always be null + // so query one extra histogram bucket back and hope there is data + // in that bucket + long histogramSearchStartTime = Math.max(0, context.start - ExtractorUtils.getHistogramIntervalMillis(context.aggs)); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .size(0) + .query(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, histogramSearchStartTime, context.end)); + + context.aggs.getAggregatorFactories().forEach(searchSourceBuilder::aggregation); + context.aggs.getPipelineAggregatorFactories().forEach(searchSourceBuilder::aggregation); + return searchSourceBuilder; + } + + protected abstract T buildSearchRequest(SearchSourceBuilder searchRequestBuilder); + + private Aggregations validateAggs(@Nullable Aggregations aggs) { + if (aggs == null) { + return null; + } + List aggsAsList = aggs.asList(); + if (aggsAsList.isEmpty()) { + return null; + } + if (aggsAsList.size() > 1) { + throw new IllegalArgumentException("Multiple top level aggregations not supported; found: " + + 
aggsAsList.stream().map(Aggregation::getName).collect(Collectors.toList())); + } + + return aggs; + } + + private InputStream processNextBatch() throws IOException { + outputStream.reset(); + + hasNext = aggregationToJsonProcessor.writeDocs(BATCH_KEY_VALUE_PAIRS, outputStream); + return new ByteArrayInputStream(outputStream.toByteArray()); + } + + protected long getHistogramInterval() { + return ExtractorUtils.getHistogramIntervalMillis(context.aggs); + } + + public AggregationDataExtractorContext getContext() { + return context; + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java index 25896822388..8705c1beee8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java @@ -5,28 +5,10 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; -import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.List; 
-import java.util.NoSuchElementException; -import java.util.Objects; -import java.util.Optional; -import java.util.stream.Collectors; +import org.elasticsearch.search.builder.SearchSourceBuilder; /** * An implementation that extracts data from elasticsearch using search with aggregations on a client. @@ -34,132 +16,19 @@ import java.util.stream.Collectors; * stored and they are then processed in batches. Cancellation is supported between batches. * Note that this class is NOT thread-safe. */ -class AggregationDataExtractor implements DataExtractor { - - private static final Logger LOGGER = LogManager.getLogger(AggregationDataExtractor.class); - - /** - * The number of key-value pairs written in each batch to process. - * This has to be a number that is small enough to allow for responsive - * cancelling and big enough to not cause overhead by calling the - * post data action too often. The value of 1000 was determined via - * such testing. - */ - private static int BATCH_KEY_VALUE_PAIRS = 1000; - - private final Client client; - private final AggregationDataExtractorContext context; - private boolean hasNext; - private boolean isCancelled; - private AggregationToJsonProcessor aggregationToJsonProcessor; - private ByteArrayOutputStream outputStream; +class AggregationDataExtractor extends AbstractAggregationDataExtractor { AggregationDataExtractor(Client client, AggregationDataExtractorContext dataExtractorContext) { - this.client = Objects.requireNonNull(client); - context = Objects.requireNonNull(dataExtractorContext); - hasNext = true; - isCancelled = false; - outputStream = new ByteArrayOutputStream(); + super(client, dataExtractorContext); } @Override - public boolean hasNext() { - return hasNext; + protected SearchRequestBuilder buildSearchRequest(SearchSourceBuilder searchSourceBuilder) { + + return new SearchRequestBuilder(client, SearchAction.INSTANCE) + .setSource(searchSourceBuilder) + .setIndices(context.indices) + .setTypes(context.types); } - @Override 
- public boolean isCancelled() { - return isCancelled; - } - - @Override - public void cancel() { - LOGGER.trace("[{}] Data extractor received cancel request", context.jobId); - isCancelled = true; - hasNext = false; - } - - @Override - public Optional next() throws IOException { - if (!hasNext()) { - throw new NoSuchElementException(); - } - - if (aggregationToJsonProcessor == null) { - Aggregations aggs = search(); - if (aggs == null) { - hasNext = false; - return Optional.empty(); - } - initAggregationProcessor(aggs); - } - - return Optional.ofNullable(processNextBatch()); - } - - private Aggregations search() throws IOException { - LOGGER.debug("[{}] Executing aggregated search", context.jobId); - SearchResponse searchResponse = executeSearchRequest(buildSearchRequest()); - LOGGER.debug("[{}] Search response was obtained", context.jobId); - ExtractorUtils.checkSearchWasSuccessful(context.jobId, searchResponse); - return validateAggs(searchResponse.getAggregations()); - } - - private void initAggregationProcessor(Aggregations aggs) throws IOException { - aggregationToJsonProcessor = new AggregationToJsonProcessor(context.timeField, context.fields, context.includeDocCount, - context.start); - aggregationToJsonProcessor.process(aggs); - } - - protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { - return ClientHelper.executeWithHeaders(context.headers, ClientHelper.ML_ORIGIN, client, searchRequestBuilder::get); - } - - private SearchRequestBuilder buildSearchRequest() { - // For derivative aggregations the first bucket will always be null - // so query one extra histogram bucket back and hope there is data - // in that bucket - long histogramSearchStartTime = Math.max(0, context.start - getHistogramInterval()); - - SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, SearchAction.INSTANCE) - .setIndices(context.indices) - .setTypes(context.types) - .setSize(0) - 
.setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, histogramSearchStartTime, context.end)); - - context.aggs.getAggregatorFactories().forEach(searchRequestBuilder::addAggregation); - context.aggs.getPipelineAggregatorFactories().forEach(searchRequestBuilder::addAggregation); - return searchRequestBuilder; - } - - private Aggregations validateAggs(@Nullable Aggregations aggs) { - if (aggs == null) { - return null; - } - List aggsAsList = aggs.asList(); - if (aggsAsList.isEmpty()) { - return null; - } - if (aggsAsList.size() > 1) { - throw new IllegalArgumentException("Multiple top level aggregations not supported; found: " - + aggsAsList.stream().map(Aggregation::getName).collect(Collectors.toList())); - } - - return aggs; - } - - private InputStream processNextBatch() throws IOException { - outputStream.reset(); - - hasNext = aggregationToJsonProcessor.writeDocs(BATCH_KEY_VALUE_PAIRS, outputStream); - return new ByteArrayInputStream(outputStream.toByteArray()); - } - - private long getHistogramInterval() { - return ExtractorUtils.getHistogramIntervalMillis(context.aggs); - } - - AggregationDataExtractorContext getContext() { - return context; - } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractor.java new file mode 100644 index 00000000000..f5de574e99a --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractor.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction; + +/** + * An implementation that extracts data from elasticsearch using search with aggregations against rollup indexes on a client. + * The first time {@link #next()} is called, the search is executed. The result aggregations are + * stored and they are then processed in batches. Cancellation is supported between batches. + * Note that this class is NOT thread-safe. + */ +class RollupDataExtractor extends AbstractAggregationDataExtractor { + + RollupDataExtractor(Client client, AggregationDataExtractorContext dataExtractorContext) { + super(client, dataExtractorContext); + } + + @Override + protected RollupSearchAction.RequestBuilder buildSearchRequest(SearchSourceBuilder searchSourceBuilder) { + SearchRequest searchRequest = new SearchRequest().indices(context.indices).source(searchSourceBuilder); + + return new RollupSearchAction.RequestBuilder(client, searchRequest); + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java new file mode 100644 index 00000000000..c8a96d6c306 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractorFactory.java @@ -0,0 +1,218 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.utils.Intervals; +import org.elasticsearch.xpack.core.rollup.action.RollableIndexCaps; +import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps.RollupFieldCaps; +import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils.getHistogramAggregation; +import static org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils.getHistogramIntervalMillis; +import static org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils.validateAndGetCalendarInterval; + +public class RollupDataExtractorFactory implements DataExtractorFactory { + + private final Client client; + private final DatafeedConfig datafeedConfig; + private final Job job; + + private 
RollupDataExtractorFactory(Client client, DatafeedConfig datafeedConfig, Job job) { + this.client = Objects.requireNonNull(client); + this.datafeedConfig = Objects.requireNonNull(datafeedConfig); + this.job = Objects.requireNonNull(job); + } + + @Override + public DataExtractor newExtractor(long start, long end) { + long histogramInterval = datafeedConfig.getHistogramIntervalMillis(); + AggregationDataExtractorContext dataExtractorContext = new AggregationDataExtractorContext( + job.getId(), + job.getDataDescription().getTimeField(), + job.getAnalysisConfig().analysisFields(), + datafeedConfig.getIndices(), + datafeedConfig.getTypes(), + datafeedConfig.getQuery(), + datafeedConfig.getAggregations(), + Intervals.alignToCeil(start, histogramInterval), + Intervals.alignToFloor(end, histogramInterval), + job.getAnalysisConfig().getSummaryCountFieldName().equals(DatafeedConfig.DOC_COUNT), + datafeedConfig.getHeaders()); + return new RollupDataExtractor(client, dataExtractorContext); + } + + public static void create(Client client, + DatafeedConfig datafeed, + Job job, + Map rollupJobsWithCaps, + ActionListener listener) { + + final AggregationBuilder datafeedHistogramAggregation = getHistogramAggregation( + datafeed.getAggregations().getAggregatorFactories()); + if ((datafeedHistogramAggregation instanceof DateHistogramAggregationBuilder) == false) { + listener.onFailure( + new IllegalArgumentException("Rollup requires that the datafeed configuration use a [date_histogram] aggregation," + + " not a [histogram] aggregation over the time field.")); + return; + } + + final String timeField = ((ValuesSourceAggregationBuilder) datafeedHistogramAggregation).field(); + + Set rollupCapsSet = rollupJobsWithCaps.values() + .stream() + .flatMap(rollableIndexCaps -> rollableIndexCaps.getJobCaps().stream()) + .map(rollupJobCaps -> ParsedRollupCaps.fromJobFieldCaps(rollupJobCaps.getFieldCaps(), timeField)) + .collect(Collectors.toSet()); + + final long datafeedInterval = 
getHistogramIntervalMillis(datafeedHistogramAggregation); + + List validIntervalCaps = rollupCapsSet.stream() + .filter(rollupCaps -> validInterval(datafeedInterval, rollupCaps)) + .collect(Collectors.toList()); + + if (validIntervalCaps.isEmpty()) { + listener.onFailure( + new IllegalArgumentException( + "Rollup capabilities do not have a [date_histogram] aggregation with an interval " + + "that is a multiple of the datafeed's interval.") + ); + return; + } + final List flattenedAggs = new ArrayList<>(); + flattenAggregations(datafeed.getAggregations().getAggregatorFactories(), datafeedHistogramAggregation, flattenedAggs); + + if (validIntervalCaps.stream().noneMatch(rollupJobConfig -> hasAggregations(rollupJobConfig, flattenedAggs))) { + listener.onFailure( + new IllegalArgumentException("Rollup capabilities do not support all the datafeed aggregations at the desired interval.") + ); + return; + } + + listener.onResponse(new RollupDataExtractorFactory(client, datafeed, job)); + } + + private static boolean validInterval(long datafeedInterval, ParsedRollupCaps rollupJobGroupConfig) { + if (rollupJobGroupConfig.hasDatehistogram() == false) { + return false; + } + if ("UTC".equalsIgnoreCase(rollupJobGroupConfig.getTimezone()) == false) { + return false; + } + try { + long jobInterval = validateAndGetCalendarInterval(rollupJobGroupConfig.getInterval()); + return datafeedInterval % jobInterval == 0; + } catch (ElasticsearchStatusException exception) { + return false; + } + } + + private static void flattenAggregations(final Collection datafeedAggregations, + final AggregationBuilder datafeedHistogramAggregation, + final List flattenedAggregations) { + for (AggregationBuilder aggregationBuilder : datafeedAggregations) { + if (aggregationBuilder.equals(datafeedHistogramAggregation) == false) { + flattenedAggregations.add((ValuesSourceAggregationBuilder)aggregationBuilder); + } + flattenAggregations(aggregationBuilder.getSubAggregations(), datafeedHistogramAggregation, 
flattenedAggregations); + } + } + + private static boolean hasAggregations(ParsedRollupCaps rollupCaps, List datafeedAggregations) { + for (ValuesSourceAggregationBuilder aggregationBuilder : datafeedAggregations) { + String type = aggregationBuilder.getType(); + String field = aggregationBuilder.field(); + if (aggregationBuilder instanceof TermsAggregationBuilder) { + if (rollupCaps.supportedTerms.contains(field) == false) { + return false; + } + } else { + if (rollupCaps.supportedMetrics.contains(field + "_" + type) == false) { + return false; + } + } + } + return true; + } + + private static class ParsedRollupCaps { + private final Set supportedMetrics; + private final Set supportedTerms; + private final Map datehistogramAgg; + private static final List aggsToIgnore = + Arrays.asList(HistogramAggregationBuilder.NAME, DateHistogramAggregationBuilder.NAME); + + private static ParsedRollupCaps fromJobFieldCaps(Map rollupFieldCaps, String timeField) { + Map datehistogram = null; + RollupFieldCaps timeFieldCaps = rollupFieldCaps.get(timeField); + if (timeFieldCaps != null) { + for(Map agg : timeFieldCaps.getAggs()) { + if (agg.get("agg").equals(DateHistogramAggregationBuilder.NAME)) { + datehistogram = agg; + } + } + } + Set supportedMetrics = new HashSet<>(); + Set supportedTerms = new HashSet<>(); + rollupFieldCaps.forEach((field, fieldCaps) -> { + fieldCaps.getAggs().forEach(agg -> { + String type = (String)agg.get("agg"); + if (type.equals(TermsAggregationBuilder.NAME)) { + supportedTerms.add(field); + } else if (aggsToIgnore.contains(type) == false) { + supportedMetrics.add(field + "_" + type); + } + }); + }); + return new ParsedRollupCaps(supportedMetrics, supportedTerms, datehistogram); + } + + private ParsedRollupCaps(Set supportedMetrics, Set supportedTerms, Map datehistogramAgg) { + this.supportedMetrics = supportedMetrics; + this.supportedTerms = supportedTerms; + this.datehistogramAgg = datehistogramAgg; + } + + private String getInterval() { + if 
(datehistogramAgg == null) { + return null; + } + return (String)datehistogramAgg.get(DateHistogramGroupConfig.INTERVAL); + } + + private String getTimezone() { + if (datehistogramAgg == null) { + return null; + } + return (String)datehistogramAgg.get(DateHistogramGroupConfig.TIME_ZONE); + } + + private boolean hasDatehistogram() { + return datehistogramAgg != null; + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java index 57040aa1474..dea9aca1d48 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; @@ -15,10 +17,14 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Min; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; +import 
org.elasticsearch.xpack.core.rollup.action.RollupSearchAction; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; +import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.RollupDataExtractorFactory; import java.io.IOException; import java.io.InputStream; @@ -43,6 +49,13 @@ import java.util.Optional; */ public class ChunkedDataExtractor implements DataExtractor { + private interface DataSummary { + long estimateChunk(); + boolean hasData(); + long earliestTime(); + long getDataTimeSpread(); + } + private static final Logger LOGGER = LogManager.getLogger(ChunkedDataExtractor.class); private static final String EARLIEST_TIME = "earliest_time"; @@ -54,6 +67,7 @@ public class ChunkedDataExtractor implements DataExtractor { private final Client client; private final DataExtractorFactory dataExtractorFactory; private final ChunkedDataExtractorContext context; + private final DataSummaryFactory dataSummaryFactory; private long currentStart; private long currentEnd; private long chunkSpan; @@ -67,6 +81,7 @@ public class ChunkedDataExtractor implements DataExtractor { this.currentStart = context.start; this.currentEnd = context.start; this.isCancelled = false; + this.dataSummaryFactory = new DataSummaryFactory(); } @Override @@ -93,48 +108,21 @@ public class ChunkedDataExtractor implements DataExtractor { } private void setUpChunkedSearch() throws IOException { - DataSummary dataSummary = requestDataSummary(); - if (dataSummary.totalHits > 0) { - currentStart = context.timeAligner.alignToFloor(dataSummary.earliestTime); + DataSummary dataSummary = dataSummaryFactory.buildDataSummary(); + if (dataSummary.hasData()) { + currentStart = context.timeAligner.alignToFloor(dataSummary.earliestTime()); currentEnd = currentStart; chunkSpan = context.chunkSpan == null ? 
dataSummary.estimateChunk() : context.chunkSpan.getMillis(); chunkSpan = context.timeAligner.alignToCeil(chunkSpan); - LOGGER.debug("[{}]Chunked search configured: totalHits = {}, dataTimeSpread = {} ms, chunk span = {} ms", - context.jobId, dataSummary.totalHits, dataSummary.getDataTimeSpread(), chunkSpan); + LOGGER.debug("[{}]Chunked search configured: kind = {}, dataTimeSpread = {} ms, chunk span = {} ms", + context.jobId, dataSummary.getClass().getSimpleName(), dataSummary.getDataTimeSpread(), chunkSpan); } else { // search is over currentEnd = context.end; } } - private DataSummary requestDataSummary() throws IOException { - SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, SearchAction.INSTANCE) - .setSize(0) - .setIndices(context.indices) - .setTypes(context.types) - .setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, currentStart, context.end)) - .addAggregation(AggregationBuilders.min(EARLIEST_TIME).field(context.timeField)) - .addAggregation(AggregationBuilders.max(LATEST_TIME).field(context.timeField)); - - SearchResponse response = executeSearchRequest(searchRequestBuilder); - LOGGER.debug("[{}] Data summary response was obtained", context.jobId); - - ExtractorUtils.checkSearchWasSuccessful(context.jobId, response); - - Aggregations aggregations = response.getAggregations(); - long earliestTime = 0; - long latestTime = 0; - long totalHits = response.getHits().getTotalHits(); - if (totalHits > 0) { - Min min = aggregations.get(EARLIEST_TIME); - earliestTime = (long) min.getValue(); - Max max = aggregations.get(LATEST_TIME); - latestTime = (long) max.getValue(); - } - return new DataSummary(earliestTime, latestTime, totalHits); - } - - protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { + protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { return ClientHelper.executeWithHeaders(context.headers, ClientHelper.ML_ORIGIN, 
client, searchRequestBuilder::get); } @@ -182,19 +170,101 @@ public class ChunkedDataExtractor implements DataExtractor { isCancelled = true; } - private class DataSummary { + ChunkedDataExtractorContext getContext() { + return context; + } + + private class DataSummaryFactory { + + /** + * If there are aggregations, an AggregatedDataSummary object is created. It returns a ScrolledDataSummary otherwise. + * + * By default a DatafeedConfig with aggregations, should already have a manual ChunkingConfig created. + * However, the end user could have specifically set the ChunkingConfig to AUTO, which would not really work for aggregations. + * So, if we need to gather an appropriate chunked time for aggregations, we can utilize the AggregatedDataSummary + * + * @return DataSummary object + * @throws IOException when timefield range search fails + */ + private DataSummary buildDataSummary() throws IOException { + return context.hasAggregations ? newAggregatedDataSummary() : newScrolledDataSummary(); + } + + private DataSummary newScrolledDataSummary() throws IOException { + SearchRequestBuilder searchRequestBuilder = rangeSearchRequest().setTypes(context.types); + + SearchResponse response = executeSearchRequest(searchRequestBuilder); + LOGGER.debug("[{}] Scrolling Data summary response was obtained", context.jobId); + + ExtractorUtils.checkSearchWasSuccessful(context.jobId, response); + + Aggregations aggregations = response.getAggregations(); + long earliestTime = 0; + long latestTime = 0; + long totalHits = response.getHits().getTotalHits(); + if (totalHits > 0) { + Min min = aggregations.get(EARLIEST_TIME); + earliestTime = (long) min.getValue(); + Max max = aggregations.get(LATEST_TIME); + latestTime = (long) max.getValue(); + } + return new ScrolledDataSummary(earliestTime, latestTime, totalHits); + } + + private DataSummary newAggregatedDataSummary() throws IOException { + // TODO: once RollupSearchAction is changed from indices:admin* to indices:data/read/* this 
branch is not needed + ActionRequestBuilder searchRequestBuilder = + dataExtractorFactory instanceof RollupDataExtractorFactory ? rollupRangeSearchRequest() : rangeSearchRequest(); + SearchResponse response = executeSearchRequest(searchRequestBuilder); + LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); + + ExtractorUtils.checkSearchWasSuccessful(context.jobId, response); + + Aggregations aggregations = response.getAggregations(); + Min min = aggregations.get(EARLIEST_TIME); + Max max = aggregations.get(LATEST_TIME); + return new AggregatedDataSummary(min.getValue(), max.getValue(), context.histogramInterval); + } + + private SearchSourceBuilder rangeSearchBuilder() { + return new SearchSourceBuilder() + .size(0) + .query(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, currentStart, context.end)) + .aggregation(AggregationBuilders.min(EARLIEST_TIME).field(context.timeField)) + .aggregation(AggregationBuilders.max(LATEST_TIME).field(context.timeField)); + } + + private SearchRequestBuilder rangeSearchRequest() { + return new SearchRequestBuilder(client, SearchAction.INSTANCE) + .setIndices(context.indices) + .setSource(rangeSearchBuilder()); + } + + private RollupSearchAction.RequestBuilder rollupRangeSearchRequest() { + SearchRequest searchRequest = new SearchRequest().indices(context.indices).source(rangeSearchBuilder()); + return new RollupSearchAction.RequestBuilder(client, searchRequest); + } + } + + private class ScrolledDataSummary implements DataSummary { private long earliestTime; private long latestTime; private long totalHits; - private DataSummary(long earliestTime, long latestTime, long totalHits) { + private ScrolledDataSummary(long earliestTime, long latestTime, long totalHits) { this.earliestTime = earliestTime; this.latestTime = latestTime; this.totalHits = totalHits; } - private long getDataTimeSpread() { + @Override + public long earliestTime() { + return earliestTime; + } + + @Override + 
public long getDataTimeSpread() { return latestTime - earliestTime; } @@ -206,7 +276,8 @@ public class ChunkedDataExtractor implements DataExtractor { * However, assuming this as the chunk span may often lead to half-filled pages or empty searches. * It is beneficial to take a multiple of that. Based on benchmarking, we set this to 10x. */ - private long estimateChunk() { + @Override + public long estimateChunk() { long dataTimeSpread = getDataTimeSpread(); if (totalHits <= 0 || dataTimeSpread <= 0) { return context.end - currentEnd; @@ -214,9 +285,46 @@ public class ChunkedDataExtractor implements DataExtractor { long estimatedChunk = 10 * (context.scrollSize * getDataTimeSpread()) / totalHits; return Math.max(estimatedChunk, MIN_CHUNK_SPAN); } + + @Override + public boolean hasData() { + return totalHits > 0; + } } - ChunkedDataExtractorContext getContext() { - return context; + private class AggregatedDataSummary implements DataSummary { + + private final double earliestTime; + private final double latestTime; + private final long histogramIntervalMillis; + + private AggregatedDataSummary(double earliestTime, double latestTime, long histogramInterval) { + this.earliestTime = earliestTime; + this.latestTime = latestTime; + this.histogramIntervalMillis = histogramInterval; + } + + /** + * This heuristic is a direct copy of the manual chunking config auto-creation done in {@link DatafeedConfig.Builder} + */ + @Override + public long estimateChunk() { + return DatafeedConfig.Builder.DEFAULT_AGGREGATION_CHUNKING_BUCKETS * histogramIntervalMillis; + } + + @Override + public boolean hasData() { + return (Double.isInfinite(earliestTime) || Double.isInfinite(latestTime)) == false; + } + + @Override + public long earliestTime() { + return (long)earliestTime; + } + + @Override + public long getDataTimeSpread() { + return (long)latestTime - (long)earliestTime; + } } } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java index 38c2efd8679..bb32b40f7cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java @@ -31,10 +31,13 @@ class ChunkedDataExtractorContext { final TimeValue chunkSpan; final TimeAligner timeAligner; final Map headers; + final boolean hasAggregations; + final Long histogramInterval; ChunkedDataExtractorContext(String jobId, String timeField, List indices, List types, QueryBuilder query, int scrollSize, long start, long end, @Nullable TimeValue chunkSpan, - TimeAligner timeAligner, Map headers) { + TimeAligner timeAligner, Map headers, boolean hasAggregations, + @Nullable Long histogramInterval) { this.jobId = Objects.requireNonNull(jobId); this.timeField = Objects.requireNonNull(timeField); this.indices = indices.toArray(new String[indices.size()]); @@ -46,5 +49,7 @@ class ChunkedDataExtractorContext { this.chunkSpan = chunkSpan; this.timeAligner = Objects.requireNonNull(timeAligner); this.headers = headers; + this.hasAggregations = hasAggregations; + this.histogramInterval = histogramInterval; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java index 7b5bac64740..67079cf2e67 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java @@ -42,7 
+42,10 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory { timeAligner.alignToFloor(end), datafeedConfig.getChunkingConfig().getTimeSpan(), timeAligner, - datafeedConfig.getHeaders()); + datafeedConfig.getHeaders(), + datafeedConfig.hasAggregations(), + datafeedConfig.hasAggregations() ? datafeedConfig.getHistogramIntervalMillis() : null + ); return new ChunkedDataExtractor(client, dataExtractorFactory, dataExtractorContext); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index cca5ae6c13f..2e7a19bca1c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -99,7 +99,6 @@ public class JobManager extends AbstractComponent { public JobManager(Environment environment, Settings settings, JobResultsProvider jobResultsProvider, ClusterService clusterService, Auditor auditor, Client client, UpdateJobProcessNotifier updateJobProcessNotifier) { - super(settings); this.settings = settings; this.environment = environment; this.jobResultsProvider = Objects.requireNonNull(jobResultsProvider); @@ -492,7 +491,7 @@ public class JobManager extends AbstractComponent { ModelSnapshot modelSnapshot) { final ModelSizeStats modelSizeStats = modelSnapshot.getModelSizeStats(); - final JobResultsPersister persister = new JobResultsPersister(settings, client); + final JobResultsPersister persister = new JobResultsPersister(client); // Step 3. 
After the model size stats is persisted, also persist the snapshot's quantiles and respond // ------- diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java index 7f7aa0a6553..29e98d01ca9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java @@ -12,9 +12,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.LifecycleListener; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; @@ -46,9 +44,9 @@ import static org.elasticsearch.xpack.core.ml.action.UpdateProcessAction.Respons * will fetch the valid state of those external resources ensuring the process is * in sync. 
*/ -public class UpdateJobProcessNotifier extends AbstractComponent { +public class UpdateJobProcessNotifier { - private static final Logger LOGGER = LogManager.getLogger(UpdateJobProcessNotifier.class); + private static final Logger logger = LogManager.getLogger(UpdateJobProcessNotifier.class); private final Client client; private final ClusterService clusterService; @@ -57,8 +55,7 @@ public class UpdateJobProcessNotifier extends AbstractComponent { private volatile ThreadPool.Cancellable cancellable; - public UpdateJobProcessNotifier(Settings settings, Client client, ClusterService clusterService, ThreadPool threadPool) { - super(settings); + public UpdateJobProcessNotifier(Client client, ClusterService clusterService, ThreadPool threadPool) { this.client = client; this.clusterService = clusterService; this.threadPool = threadPool; @@ -112,7 +109,7 @@ public class UpdateJobProcessNotifier extends AbstractComponent { if (update.isJobUpdate() && clusterService.localNode().isMasterNode() == false) { assert clusterService.localNode().isMasterNode(); - LOGGER.error("Job update was submitted to non-master node [" + clusterService.getNodeName() + "]; update for job [" + logger.error("Job update was submitted to non-master node [" + clusterService.getNodeName() + "]; update for job [" + update.getJobId() + "] will be ignored"); executeProcessUpdates(updatesIterator); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java index d3b66f47661..c0b8b4d9365 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataCountsPersister.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import 
org.elasticsearch.client.Client; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; @@ -34,8 +33,7 @@ public class JobDataCountsPersister extends AbstractComponent { private final Client client; - public JobDataCountsPersister(Settings settings, Client client) { - super(settings); + public JobDataCountsPersister(Client client) { this.client = client; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java index f318737e0b4..5ee05feaebf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersister.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -49,8 +48,7 @@ public class JobRenormalizedResultsPersister extends AbstractComponent { private final Client client; private BulkRequest bulkRequest; - public JobRenormalizedResultsPersister(String jobId, Settings settings, Client client) { - super(settings); + public JobRenormalizedResultsPersister(String jobId, Client client) { this.jobId = jobId; this.client = client; bulkRequest = new BulkRequest(); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java index 233a2b4078a..782f1fc39ef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -68,8 +67,7 @@ public class JobResultsPersister extends AbstractComponent { private final Client client; - public JobResultsPersister(Settings settings, Client client) { - super(settings); + public JobResultsPersister(Client client) { this.client = client; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java index 0d2b1bb345a..1bc214e7b40 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.job.process; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import 
org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; @@ -52,10 +51,7 @@ public class DataCountsReporter extends AbstractComponent { private DataStreamDiagnostics diagnostics; - public DataCountsReporter(Settings settings, Job job, DataCounts counts, JobDataCountsPersister dataCountsPersister) { - - super(settings); - + public DataCountsReporter(Job job, DataCounts counts, JobDataCountsPersister dataCountsPersister) { this.job = job; this.dataCountsPersister = dataCountsPersister; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index ea9442b8367..887ea5262ae 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -137,7 +137,6 @@ public class AutodetectProcessManager extends AbstractComponent { JobDataCountsPersister jobDataCountsPersister, AutodetectProcessFactory autodetectProcessFactory, NormalizerFactory normalizerFactory, NamedXContentRegistry xContentRegistry, Auditor auditor) { - super(settings); this.settings = settings; this.environment = environment; this.client = client; @@ -495,12 +494,9 @@ public class AutodetectProcessManager extends AbstractComponent { Job job = jobManager.getJobOrThrowIfUnknown(jobId); // A TP with no queue, so that we fail immediately if there are no threads available ExecutorService autoDetectExecutorService = threadPool.executor(MachineLearning.AUTODETECT_THREAD_POOL_NAME); - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, - job, - autodetectParams.dataCounts(), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(job, autodetectParams.dataCounts(), jobDataCountsPersister); 
ScoresUpdater scoresUpdater = new ScoresUpdater(job, jobResultsProvider, - new JobRenormalizedResultsPersister(job.getId(), settings, client), normalizerFactory); + new JobRenormalizedResultsPersister(job.getId(), client), normalizerFactory); ExecutorService renormalizerExecutorService = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); Renormalizer renormalizer = new ShortCircuitingRenormalizer(jobId, scoresUpdater, renormalizerExecutorService); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java index 9b88e31b2fa..3185ebc6f1c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java @@ -68,7 +68,7 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory int numberOfFields = job.allInputFields().size() + (includeTokensField ? 
1 : 0) + 1; AutodetectStateProcessor stateProcessor = new AutodetectStateProcessor(client, job.getId()); - AutodetectResultsParser resultsParser = new AutodetectResultsParser(settings); + AutodetectResultsParser resultsParser = new AutodetectResultsParser(); NativeAutodetectProcess autodetect = new NativeAutodetectProcess( job.getId(), processPipes.getLogStream().get(), processPipes.getProcessInStream().get(), processPipes.getProcessOutStream().get(), processPipes.getRestoreStream().orElse(null), numberOfFields, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java index d4f9c431f7b..8d7e64c3d45 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParser.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.job.process.autodetect.output; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; @@ -27,11 +26,6 @@ import java.util.Iterator; * start array symbol and the data must be terminated with the end array symbol. 
*/ public class AutodetectResultsParser extends AbstractComponent { - - public AutodetectResultsParser(Settings settings) { - super(settings); - } - public Iterator parseResults(InputStream in) throws ElasticsearchParseException { try { XContentParser parser = XContentFactory.xContent(XContentType.JSON) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index 485556d8441..19084ad7f96 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -5,15 +5,29 @@ */ package org.elasticsearch.xpack.ml; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.monitoring.Monitoring; import org.elasticsearch.xpack.security.Security; import java.nio.file.Path; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; public class LocalStateMachineLearning extends LocalStateCompositeXPackPlugin { @@ -50,6 +64,38 @@ public class LocalStateMachineLearning extends 
LocalStateCompositeXPackPlugin { @Override protected XPackLicenseState getLicenseState() { return thisVar.getLicenseState(); } }); + plugins.add(new MockedRollupPlugin()); + } + + /** + * This is only required as we now have to have the GetRollupIndexCapsAction as a valid action in our node. + * The MachineLearningLicenseTests attempt to create a datafeed referencing this LocalStateMachineLearning object. + * Consequently, we need to be able to take this rollup action (response does not matter) + * as the datafeed extractor now depends on it. + */ + public static class MockedRollupPlugin extends Plugin implements ActionPlugin { + + @Override + public List> getActions() { + return Collections.singletonList( + new ActionHandler<>(GetRollupIndexCapsAction.INSTANCE, MockedRollupIndexCapsTransport.class) + ); + } + + public static class MockedRollupIndexCapsTransport + extends TransportAction { + + @Inject + public MockedRollupIndexCapsTransport(Settings settings, TransportService transportService) { + super(settings, GetRollupIndexCapsAction.NAME, new ActionFilters(new HashSet<>()), transportService.getTaskManager()); + } + + @Override + protected void doExecute(Task task, + GetRollupIndexCapsAction.Request request, + ActionListener listener) { + listener.onResponse(new GetRollupIndexCapsAction.Response()); + } + } } } - diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java index 3055dc2bb37..7cd0d3cf008 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlAssignmentNotifierTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; 
-import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; @@ -35,7 +34,7 @@ public class MlAssignmentNotifierTests extends ESTestCase { public void testClusterChanged_info() throws Exception { Auditor auditor = mock(Auditor.class); ClusterService clusterService = mock(ClusterService.class); - MlAssignmentNotifier notifier = new MlAssignmentNotifier(Settings.EMPTY, auditor, clusterService); + MlAssignmentNotifier notifier = new MlAssignmentNotifier(auditor, clusterService); notifier.onMaster(); DiscoveryNode node = @@ -63,7 +62,7 @@ public class MlAssignmentNotifierTests extends ESTestCase { public void testClusterChanged_warning() throws Exception { Auditor auditor = mock(Auditor.class); ClusterService clusterService = mock(ClusterService.class); - MlAssignmentNotifier notifier = new MlAssignmentNotifier(Settings.EMPTY, auditor, clusterService); + MlAssignmentNotifier notifier = new MlAssignmentNotifier(auditor, clusterService); notifier.onMaster(); ClusterState previous = ClusterState.builder(new ClusterName("_name")) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java index c7f50440f0e..5ded1b205a1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlInitializationServiceTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -62,7 +61,7 @@ public class MlInitializationServiceTests extends ESTestCase { } public void testInitialize() { - MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client); + MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client); ClusterState cs = ClusterState.builder(new ClusterName("_name")) .nodes(DiscoveryNodes.builder() @@ -77,7 +76,7 @@ public class MlInitializationServiceTests extends ESTestCase { } public void testInitialize_noMasterNode() { - MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client); + MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client); ClusterState cs = ClusterState.builder(new ClusterName("_name")) .nodes(DiscoveryNodes.builder() @@ -90,7 +89,7 @@ public class MlInitializationServiceTests extends ESTestCase { } public void testInitialize_alreadyInitialized() { - MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client); + MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client); ClusterState cs = ClusterState.builder(new ClusterName("_name")) .nodes(DiscoveryNodes.builder() @@ -108,7 +107,7 @@ public class MlInitializationServiceTests extends ESTestCase { } public void testNodeGoesFromMasterToNonMasterAndBack() { - MlInitializationService initializationService = new MlInitializationService(Settings.EMPTY, threadPool, clusterService, client); + MlInitializationService initializationService = new MlInitializationService(threadPool, clusterService, client); MlDailyMaintenanceService initialDailyMaintenanceService = mock(MlDailyMaintenanceService.class); 
initializationService.setDailyMaintenanceService(initialDailyMaintenanceService); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index 4b8ad1d08ae..ad5d2dbe5ef 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -57,7 +56,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase { @Before public void init() { - resolver = new IndexNameExpressionResolver(Settings.EMPTY); + resolver = new IndexNameExpressionResolver(); nodes = DiscoveryNodes.builder() .add(new DiscoveryNode("node_name", "node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9300), Collections.emptyMap(), Collections.emptySet(), Version.CURRENT)) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java index 11ff693bad7..9e229e2b057 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java @@ -14,23 +14,39 @@ import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.core.ml.job.config.DataDescription; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; +import org.elasticsearch.xpack.core.rollup.action.RollableIndexCaps; +import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps; +import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; +import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import org.elasticsearch.xpack.core.rollup.job.MetricConfig; +import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; +import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedManagerTests; import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationDataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.chunked.ChunkedDataExtractorFactory; +import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.RollupDataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.scroll.ScrollDataExtractorFactory; -import org.elasticsearch.xpack.core.ml.job.config.DataDescription; -import org.elasticsearch.xpack.core.ml.job.config.Job; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; import java.util.Date; import 
java.util.HashMap; +import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Matchers.any; import static org.mockito.Matchers.same; @@ -41,6 +57,7 @@ import static org.mockito.Mockito.when; public class DataExtractorFactoryTests extends ESTestCase { private FieldCapabilitiesResponse fieldsCapabilities; + private GetRollupIndexCapsAction.Response getRollupIndexResponse; private Client client; @@ -54,12 +71,22 @@ public class DataExtractorFactoryTests extends ESTestCase { givenAggregatableField("time", "date"); givenAggregatableField("field", "keyword"); + getRollupIndexResponse = mock(GetRollupIndexCapsAction.Response.class); + when(getRollupIndexResponse.getJobs()).thenReturn(new HashMap<>()); + doAnswer(invocationMock -> { @SuppressWarnings("raw_types") ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; listener.onResponse(fieldsCapabilities); return null; }).when(client).execute(same(FieldCapabilitiesAction.INSTANCE), any(), any()); + + doAnswer(invocationMock -> { + @SuppressWarnings("raw_types") + ActionListener listener = (ActionListener) invocationMock.getArguments()[2]; + listener.onResponse(getRollupIndexResponse); + return null; + }).when(client).execute(same(GetRollupIndexCapsAction.INSTANCE), any(), any()); } public void testCreateDataExtractorFactoryGivenDefaultScroll() { @@ -165,6 +192,162 @@ public class DataExtractorFactoryTests extends ESTestCase { DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); } + public void testCreateDataExtractorFactoryGivenRollupAndValidAggregation() { + givenAggregatableRollup("myField", "max", 5, "termField"); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeField("time"); + Job.Builder jobBuilder = 
DatafeedManagerTests.createDatafeedJob(); + jobBuilder.setDataDescription(dataDescription); + DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("datafeed1", "foo"); + datafeedConfig.setChunkingConfig(ChunkingConfig.newOff()); + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + MaxAggregationBuilder myField = AggregationBuilders.max("myField").field("myField"); + TermsAggregationBuilder myTerm = AggregationBuilders.terms("termAgg").field("termField").subAggregation(myField); + datafeedConfig.setAggregations(AggregatorFactories.builder().addAggregator( + AggregationBuilders.dateHistogram("time").interval(600_000).subAggregation(maxTime).subAggregation(myTerm).field("time"))); + ActionListener listener = ActionListener.wrap( + dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(RollupDataExtractorFactory.class)), + e -> fail() + ); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + } + + public void testCreateDataExtractorFactoryGivenRollupAndValidAggregationAndAutoChunk() { + givenAggregatableRollup("myField", "max", 5, "termField"); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeField("time"); + Job.Builder jobBuilder = DatafeedManagerTests.createDatafeedJob(); + jobBuilder.setDataDescription(dataDescription); + DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("datafeed1", "foo"); + datafeedConfig.setChunkingConfig(ChunkingConfig.newAuto()); + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + MaxAggregationBuilder myField = AggregationBuilders.max("myField").field("myField"); + TermsAggregationBuilder myTerm = AggregationBuilders.terms("termAgg").field("termField").subAggregation(myField); + datafeedConfig.setAggregations(AggregatorFactories.builder().addAggregator( + 
AggregationBuilders.dateHistogram("time").interval(600_000).subAggregation(maxTime).subAggregation(myTerm).field("time"))); + ActionListener listener = ActionListener.wrap( + dataExtractorFactory -> assertThat(dataExtractorFactory, instanceOf(ChunkedDataExtractorFactory.class)), + e -> fail() + ); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + } + + public void testCreateDataExtractorFactoryGivenRollupButNoAggregations() { + givenAggregatableRollup("myField", "max", 5); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeField("time"); + Job.Builder jobBuilder = DatafeedManagerTests.createDatafeedJob(); + jobBuilder.setDataDescription(dataDescription); + DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("datafeed1", "foo"); + datafeedConfig.setChunkingConfig(ChunkingConfig.newOff()); + + ActionListener listener = ActionListener.wrap( + dataExtractorFactory -> fail(), + e -> { + assertThat(e.getMessage(), equalTo("Aggregations are required when using Rollup indices")); + assertThat(e, instanceOf(IllegalArgumentException.class)); + } + ); + + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + } + + public void testCreateDataExtractorFactoryGivenRollupWithBadInterval() { + givenAggregatableRollup("myField", "max", 7, "termField"); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeField("time"); + Job.Builder jobBuilder = DatafeedManagerTests.createDatafeedJob(); + jobBuilder.setDataDescription(dataDescription); + DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("datafeed1", "foo"); + datafeedConfig.setChunkingConfig(ChunkingConfig.newOff()); + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + MaxAggregationBuilder myField = 
AggregationBuilders.max("myField").field("myField"); + TermsAggregationBuilder myTerm = AggregationBuilders.terms("termAgg").field("termField").subAggregation(myField); + datafeedConfig.setAggregations(AggregatorFactories.builder().addAggregator( + AggregationBuilders.dateHistogram("time").interval(600_000).subAggregation(maxTime).subAggregation(myTerm).field("time"))); + ActionListener listener = ActionListener.wrap( + dataExtractorFactory -> fail(), + e -> { + assertThat(e.getMessage(), + containsString("Rollup capabilities do not have a [date_histogram] aggregation with an interval " + + "that is a multiple of the datafeed's interval.")); + assertThat(e, instanceOf(IllegalArgumentException.class)); + } + ); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + } + + public void testCreateDataExtractorFactoryGivenRollupMissingTerms() { + givenAggregatableRollup("myField", "max", 5); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeField("time"); + Job.Builder jobBuilder = DatafeedManagerTests.createDatafeedJob(); + jobBuilder.setDataDescription(dataDescription); + DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("datafeed1", "foo"); + datafeedConfig.setChunkingConfig(ChunkingConfig.newOff()); + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + MaxAggregationBuilder myField = AggregationBuilders.max("myField").field("myField"); + TermsAggregationBuilder myTerm = AggregationBuilders.terms("termAgg").field("termField").subAggregation(myField); + datafeedConfig.setAggregations(AggregatorFactories.builder().addAggregator( + AggregationBuilders.dateHistogram("time").interval(600_000).subAggregation(maxTime).subAggregation(myTerm).field("time"))); + ActionListener listener = ActionListener.wrap( + dataExtractorFactory -> fail(), + e -> { + assertThat(e.getMessage(), + containsString("Rollup capabilities do 
not support all the datafeed aggregations at the desired interval.")); + assertThat(e, instanceOf(IllegalArgumentException.class)); + } + ); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + } + + public void testCreateDataExtractorFactoryGivenRollupMissingMetric() { + givenAggregatableRollup("myField", "max", 5, "termField"); + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setTimeField("time"); + Job.Builder jobBuilder = DatafeedManagerTests.createDatafeedJob(); + jobBuilder.setDataDescription(dataDescription); + DatafeedConfig.Builder datafeedConfig = DatafeedManagerTests.createDatafeedConfig("datafeed1", "foo"); + datafeedConfig.setChunkingConfig(ChunkingConfig.newOff()); + MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time"); + MaxAggregationBuilder myField = AggregationBuilders.max("myField").field("otherField"); + TermsAggregationBuilder myTerm = AggregationBuilders.terms("termAgg").field("termField").subAggregation(myField); + datafeedConfig.setAggregations(AggregatorFactories.builder().addAggregator( + AggregationBuilders.dateHistogram("time").interval(600_000).subAggregation(maxTime).subAggregation(myTerm).field("time"))); + ActionListener listener = ActionListener.wrap( + dataExtractorFactory -> fail(), + e -> { + assertThat(e.getMessage(), + containsString("Rollup capabilities do not support all the datafeed aggregations at the desired interval.")); + assertThat(e, instanceOf(IllegalArgumentException.class)); + } + ); + DataExtractorFactory.create(client, datafeedConfig.build(), jobBuilder.build(new Date()), listener); + } + + private void givenAggregatableRollup(String field, String type, int minuteInterval, String... 
groupByTerms) { + List metricConfigs = Arrays.asList(new MetricConfig(field, Collections.singletonList(type)), + new MetricConfig("time", Arrays.asList("min", "max"))); + TermsGroupConfig termsGroupConfig = null; + if (groupByTerms.length > 0) { + termsGroupConfig = new TermsGroupConfig(groupByTerms); + } + RollupJobConfig rollupJobConfig = new RollupJobConfig("rollupJob1", + "myIndexes*", + "myIndex_rollup", + "*/30 * * * * ?", + 300, + new GroupConfig( + new DateHistogramGroupConfig("time", DateHistogramInterval.minutes(minuteInterval)), null, termsGroupConfig), + metricConfigs, + null); + RollupJobCaps rollupJobCaps = new RollupJobCaps(rollupJobConfig); + RollableIndexCaps rollableIndexCaps = new RollableIndexCaps("myIndex_rollup", Collections.singletonList(rollupJobCaps)); + Map jobs = new HashMap<>(1); + jobs.put("rollupJob1", rollableIndexCaps); + when(getRollupIndexResponse.getJobs()).thenReturn(jobs); + } + private void givenAggregatableField(String field, String type) { FieldCapabilities fieldCaps = mock(FieldCapabilities.class); when(fieldCaps.isSearchable()).thenReturn(true); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java index 93bdc125890..6561cfd56e2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java @@ -46,7 +46,7 @@ import static org.mockito.Mockito.when; public class AggregationDataExtractorTests extends ESTestCase { - private Client client; + private Client testClient; private List capturedSearchRequests; private String jobId; private String timeField; @@ -61,7 +61,7 @@ public class AggregationDataExtractorTests extends 
ESTestCase { private SearchResponse nextResponse; TestDataExtractor(long start, long end) { - super(client, createContext(start, end)); + super(testClient, createContext(start, end)); } @Override @@ -77,7 +77,7 @@ public class AggregationDataExtractorTests extends ESTestCase { @Before public void setUpTests() { - client = mock(Client.class); + testClient = mock(Client.class); capturedSearchRequests = new ArrayList<>(); jobId = "test-job"; timeField = "time"; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java index e85b1e3a6df..4f0d3ec8e9f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; -import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Client; @@ -44,7 +45,7 @@ import static org.mockito.Mockito.when; public class ChunkedDataExtractorTests extends ESTestCase { private Client client; - private List capturedSearchRequests; + private List capturedSearchRequests; private String jobId; private String timeField; private List types; @@ -62,9 +63,13 @@ public class ChunkedDataExtractorTests extends ESTestCase { super(client, dataExtractorFactory, createContext(start, end)); } + TestDataExtractor(long start, long end, boolean hasAggregations, Long histogramInterval) { + super(client, dataExtractorFactory, createContext(start, end, 
hasAggregations, histogramInterval)); + } + @Override - protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { - capturedSearchRequests.add(searchRequestBuilder); + protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { + capturedSearchRequests.add(searchRequestBuilder.request()); return nextResponse; } @@ -136,6 +141,89 @@ public class ChunkedDataExtractorTests extends ESTestCase { assertThat(searchRequest, not(containsString("\"sort\""))); } + public void testExtractionGivenSpecifiedChunkAndAggs() throws IOException { + chunkSpan = TimeValue.timeValueSeconds(1); + TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L, true, 1000L); + extractor.setNextResponse(createSearchResponse(0L, 1000L, 2200L)); + + InputStream inputStream1 = mock(InputStream.class); + InputStream inputStream2 = mock(InputStream.class); + InputStream inputStream3 = mock(InputStream.class); + + DataExtractor subExtactor1 = new StubSubExtractor(inputStream1, inputStream2); + when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtactor1); + + DataExtractor subExtactor2 = new StubSubExtractor(inputStream3); + when(dataExtractorFactory.newExtractor(2000L, 2300L)).thenReturn(subExtactor2); + + assertThat(extractor.hasNext(), is(true)); + assertEquals(inputStream1, extractor.next().get()); + assertThat(extractor.hasNext(), is(true)); + assertEquals(inputStream2, extractor.next().get()); + assertThat(extractor.hasNext(), is(true)); + assertEquals(inputStream3, extractor.next().get()); + assertThat(extractor.hasNext(), is(true)); + assertThat(extractor.next().isPresent(), is(false)); + + verify(dataExtractorFactory).newExtractor(1000L, 2000L); + verify(dataExtractorFactory).newExtractor(2000L, 2300L); + Mockito.verifyNoMoreInteractions(dataExtractorFactory); + + assertThat(capturedSearchRequests.size(), equalTo(1)); + String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); 
+ assertThat(searchRequest, containsString("\"size\":0")); + assertThat(searchRequest, containsString("\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}}," + + "{\"range\":{\"time\":{\"from\":1000,\"to\":2300,\"include_lower\":true,\"include_upper\":false," + + "\"format\":\"epoch_millis\",\"boost\":1.0}}}]")); + assertThat(searchRequest, containsString("\"aggregations\":{\"earliest_time\":{\"min\":{\"field\":\"time\"}}," + + "\"latest_time\":{\"max\":{\"field\":\"time\"}}}}")); + assertThat(searchRequest, not(containsString("\"sort\""))); + } + + public void testExtractionGivenAutoChunkAndAggs() throws IOException { + chunkSpan = null; + TestDataExtractor extractor = new TestDataExtractor(100_000L, 450_000L, true, 200L); + + extractor.setNextResponse(createSearchResponse(0L, 100_000L, 400_000L)); + + InputStream inputStream1 = mock(InputStream.class); + InputStream inputStream2 = mock(InputStream.class); + + // 200 * 1_000 == 200_000 + DataExtractor subExtactor1 = new StubSubExtractor(inputStream1); + when(dataExtractorFactory.newExtractor(100_000L, 300_000L)).thenReturn(subExtactor1); + + DataExtractor subExtactor2 = new StubSubExtractor(inputStream2); + when(dataExtractorFactory.newExtractor(300_000L, 450_000L)).thenReturn(subExtactor2); + + assertThat(extractor.hasNext(), is(true)); + assertEquals(inputStream1, extractor.next().get()); + assertThat(extractor.hasNext(), is(true)); + assertEquals(inputStream2, extractor.next().get()); + assertThat(extractor.next().isPresent(), is(false)); + assertThat(extractor.hasNext(), is(false)); + + verify(dataExtractorFactory).newExtractor(100_000L, 300_000L); + verify(dataExtractorFactory).newExtractor(300_000L, 450_000L); + Mockito.verifyNoMoreInteractions(dataExtractorFactory); + + assertThat(capturedSearchRequests.size(), equalTo(1)); + } + + public void testExtractionGivenAutoChunkAndAggsAndNoData() throws IOException { + chunkSpan = null; + TestDataExtractor extractor = new TestDataExtractor(100L, 500L, 
true, 200L); + + extractor.setNextResponse(createNullSearchResponse()); + + assertThat(extractor.next().isPresent(), is(false)); + assertThat(extractor.hasNext(), is(false)); + + Mockito.verifyNoMoreInteractions(dataExtractorFactory); + + assertThat(capturedSearchRequests.size(), equalTo(1)); + } + public void testExtractionGivenAutoChunkAndScrollSize1000() throws IOException { chunkSpan = null; scrollSize = 1000; @@ -430,6 +518,27 @@ public class ChunkedDataExtractorTests extends ESTestCase { return searchResponse; } + private SearchResponse createNullSearchResponse() { + SearchResponse searchResponse = mock(SearchResponse.class); + when(searchResponse.status()).thenReturn(RestStatus.OK); + SearchHit[] hits = new SearchHit[0]; + SearchHits searchHits = new SearchHits(hits, 0, 1); + when(searchResponse.getHits()).thenReturn(searchHits); + + List aggs = new ArrayList<>(); + Min min = mock(Min.class); + when(min.getValue()).thenReturn(Double.POSITIVE_INFINITY); + when(min.getName()).thenReturn("earliest_time"); + aggs.add(min); + Max max = mock(Max.class); + when(max.getValue()).thenReturn(Double.POSITIVE_INFINITY); + when(max.getName()).thenReturn("latest_time"); + aggs.add(max); + Aggregations aggregations = new Aggregations(aggs) {}; + when(searchResponse.getAggregations()).thenReturn(aggregations); + return searchResponse; + } + private SearchResponse createErrorResponse() { SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.status()).thenReturn(RestStatus.INTERNAL_SERVER_ERROR); @@ -445,8 +554,12 @@ public class ChunkedDataExtractorTests extends ESTestCase { } private ChunkedDataExtractorContext createContext(long start, long end) { + return createContext(start, end, false, null); + } + + private ChunkedDataExtractorContext createContext(long start, long end, boolean hasAggregations, Long histogramInterval) { return new ChunkedDataExtractorContext(jobId, timeField, indices, types, query, scrollSize, start, end, chunkSpan, - 
ChunkedDataExtractorFactory.newIdentityTimeAligner(), Collections.emptyMap()); + ChunkedDataExtractorFactory.newIdentityTimeAligner(), Collections.emptyMap(), hasAggregations, histogramInterval); } private static class StubSubExtractor implements DataExtractor { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 756eeb8626d..3191253f806 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -103,7 +103,7 @@ public class AutodetectResultProcessorIT extends MlSingleNodeTestCase { renormalizer = mock(Renormalizer.class); capturedUpdateModelSnapshotOnJobRequests = new ArrayList<>(); resultProcessor = new AutoDetectResultProcessor(client(), auditor, JOB_ID, renormalizer, - new JobResultsPersister(nodeSettings(), client()), jobResultsProvider, new ModelSizeStats.Builder(JOB_ID).build(), false) { + new JobResultsPersister(client()), jobResultsProvider, new ModelSizeStats.Builder(JOB_ID).build(), false) { @Override protected void updateModelSnapshotOnJob(ModelSnapshot modelSnapshot) { capturedUpdateModelSnapshotOnJobRequests.add(modelSnapshot); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java index df3af13f714..a026d5d6c33 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/EstablishedMemUsageIT.java @@ -33,7 +33,7 @@ public class EstablishedMemUsageIT extends BaseMlIntegTestCase { public void createComponents() { Settings settings = 
nodeSettings(0); jobResultsProvider = new JobResultsProvider(client(), settings); - jobResultsPersister = new JobResultsPersister(settings, client()); + jobResultsPersister = new JobResultsPersister(client()); } public void testEstablishedMem_givenNoResults() throws Exception { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index e36c313b626..dcbed9986a8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -542,7 +542,7 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase { } private void indexDataCounts(DataCounts counts, String jobId) throws Exception { - JobDataCountsPersister persister = new JobDataCountsPersister(nodeSettings(), client()); + JobDataCountsPersister persister = new JobDataCountsPersister(client()); AtomicReference errorHolder = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); @@ -582,17 +582,17 @@ public class JobResultsProviderIT extends MlSingleNodeTestCase { } private void indexModelSizeStats(ModelSizeStats modelSizeStats) { - JobResultsPersister persister = new JobResultsPersister(nodeSettings(), client()); + JobResultsPersister persister = new JobResultsPersister(client()); persister.persistModelSizeStats(modelSizeStats); } private void indexModelSnapshot(ModelSnapshot snapshot) { - JobResultsPersister persister = new JobResultsPersister(nodeSettings(), client()); + JobResultsPersister persister = new JobResultsPersister(client()); persister.persistModelSnapshot(snapshot, WriteRequest.RefreshPolicy.IMMEDIATE); } private void indexQuantiles(Quantiles quantiles) { - JobResultsPersister persister = new JobResultsPersister(nodeSettings(), client()); + JobResultsPersister persister = new 
JobResultsPersister(client()); persister.persistQuantiles(quantiles); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java index beaf8a45c15..67138cde5bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; import java.util.ArrayList; import java.util.Arrays; @@ -54,7 +53,6 @@ public class NetworkDisruptionIT extends BaseMlIntegTestCase { return plugins; } - @TestLogging("org.elasticsearch.discovery.zen:TRACE") public void testJobRelocation() throws Exception { internalCluster().ensureAtLeastNumDataNodes(5); ensureStableCluster(5); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java index b1ec3008d33..f9bacee003c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobRenormalizedResultsPersisterTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.xpack.ml.job.process.normalizer.BucketNormalizable; import org.elasticsearch.xpack.core.ml.job.results.Bucket; @@ -45,7 +44,7 @@ public class JobRenormalizedResultsPersisterTests extends ESTestCase { public void testBulkRequestExecutesWhenReachMaxDocs() { BulkResponse bulkResponse = mock(BulkResponse.class); Client client = new MockClientBuilder("cluster").bulk(bulkResponse).build(); - JobRenormalizedResultsPersister persister = new JobRenormalizedResultsPersister("foo", Settings.EMPTY, client); + JobRenormalizedResultsPersister persister = new JobRenormalizedResultsPersister("foo", client); ModelPlot modelPlot = new ModelPlot("foo", new Date(), 123456, 0); for (int i=0; i<=JobRenormalizedResultsPersister.BULK_LIMIT; i++) { @@ -62,7 +61,7 @@ public class JobRenormalizedResultsPersisterTests extends ESTestCase { when(bulkResponse.hasFailures()).thenReturn(false); Client client = new MockClientBuilder("cluster").bulk(bulkResponse).build(); - return new JobRenormalizedResultsPersister("foo", Settings.EMPTY, client); + return new JobRenormalizedResultsPersister("foo", client); } private BucketNormalizable createBucketNormalizable() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index c31ebd4bc2c..0035531d55a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -61,7 +61,7 @@ public class JobResultsPersisterTests extends ESTestCase { AnomalyRecord record = new AnomalyRecord(JOB_ID, new Date(), 600); bucket.setRecords(Collections.singletonList(record)); - JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client); + JobResultsPersister persister = new JobResultsPersister(client); 
persister.bulkPersisterBuilder(JOB_ID).persistBucket(bucket).executeRequest(); BulkRequest bulkRequest = captor.getValue(); assertEquals(2, bulkRequest.numberOfActions()); @@ -113,7 +113,7 @@ public class JobResultsPersisterTests extends ESTestCase { typicals.add(998765.3); r1.setTypical(typicals); - JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client); + JobResultsPersister persister = new JobResultsPersister(client); persister.bulkPersisterBuilder(JOB_ID).persistRecords(records).executeRequest(); BulkRequest bulkRequest = captor.getValue(); assertEquals(1, bulkRequest.numberOfActions()); @@ -149,7 +149,7 @@ public class JobResultsPersisterTests extends ESTestCase { inf.setProbability(0.4); influencers.add(inf); - JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client); + JobResultsPersister persister = new JobResultsPersister(client); persister.bulkPersisterBuilder(JOB_ID).persistInfluencers(influencers).executeRequest(); BulkRequest bulkRequest = captor.getValue(); assertEquals(1, bulkRequest.numberOfActions()); @@ -165,7 +165,7 @@ public class JobResultsPersisterTests extends ESTestCase { public void testExecuteRequest_ClearsBulkRequest() { ArgumentCaptor captor = ArgumentCaptor.forClass(BulkRequest.class); Client client = mockClient(captor); - JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client); + JobResultsPersister persister = new JobResultsPersister(client); List influencers = new ArrayList<>(); Influencer inf = new Influencer(JOB_ID, "infName1", "infValue1", new Date(), 600); @@ -182,7 +182,7 @@ public class JobResultsPersisterTests extends ESTestCase { public void testBulkRequestExecutesWhenReachMaxDocs() { ArgumentCaptor captor = ArgumentCaptor.forClass(BulkRequest.class); Client client = mockClient(captor); - JobResultsPersister persister = new JobResultsPersister(Settings.EMPTY, client); + JobResultsPersister persister = new JobResultsPersister(client); 
JobResultsPersister.Builder bulkBuilder = persister.bulkPersisterBuilder("foo"); ModelPlot modelPlot = new ModelPlot("foo", new Date(), 123456, 0); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java index 4be8d74274c..5415b460191 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java @@ -6,9 +6,7 @@ package org.elasticsearch.xpack.ml.job.process; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; @@ -34,14 +32,10 @@ public class DataCountsReporterTests extends ESTestCase { private Job job; private JobDataCountsPersister jobDataCountsPersister; - private Settings settings; private TimeValue bucketSpan = TimeValue.timeValueSeconds(300); @Before public void setUpMocks() { - settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build(); - AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Arrays.asList(new Detector.Builder("metric", "field").build())); acBuilder.setBucketSpan(bucketSpan); acBuilder.setLatency(TimeValue.ZERO); @@ -57,8 +51,7 @@ public class DataCountsReporterTests extends ESTestCase { } public void testSimpleConstructor() throws Exception { - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(job, new 
DataCounts(job.getId()), jobDataCountsPersister); DataCounts stats = dataCountsReporter.incrementalStats(); assertNotNull(stats); assertAllCountFieldsEqualZero(stats); @@ -68,8 +61,7 @@ public class DataCountsReporterTests extends ESTestCase { DataCounts counts = new DataCounts("foo", 1L, 1L, 2L, 0L, 3L, 4L, 5L, 6L, 7L, 8L, new Date(), new Date(), new Date(), new Date(), new Date()); - DataCountsReporter dataCountsReporter = - new DataCountsReporter(settings, job, counts, jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(job, counts, jobDataCountsPersister); DataCounts stats = dataCountsReporter.incrementalStats(); assertNotNull(stats); assertAllCountFieldsEqualZero(stats); @@ -86,8 +78,7 @@ public class DataCountsReporterTests extends ESTestCase { } public void testResetIncrementalCounts() throws Exception { - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(job, new DataCounts(job.getId()), jobDataCountsPersister); DataCounts stats = dataCountsReporter.incrementalStats(); assertNotNull(stats); assertAllCountFieldsEqualZero(stats); @@ -139,16 +130,14 @@ public class DataCountsReporterTests extends ESTestCase { } public void testReportLatestTimeIncrementalStats() throws IOException { - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(job, new DataCounts(job.getId()), jobDataCountsPersister); dataCountsReporter.startNewIncrementalCount(); dataCountsReporter.reportLatestTimeIncrementalStats(5001L); assertEquals(5001L, dataCountsReporter.incrementalStats().getLatestRecordTimeStamp().getTime()); } public void testReportRecordsWritten() { - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new 
DataCounts(job.getId()), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(job, new DataCounts(job.getId()), jobDataCountsPersister); dataCountsReporter.setAnalysedFieldsPerRecord(3); dataCountsReporter.reportRecordWritten(5, 2000); @@ -258,8 +247,7 @@ public class DataCountsReporterTests extends ESTestCase { public void testFinishReporting() { - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(job, new DataCounts(job.getId()), jobDataCountsPersister); dataCountsReporter.setAnalysedFieldsPerRecord(3); Date now = new Date(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java index 98ab4025bff..3967e83f5ec 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.ml.job.process; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; @@ -27,8 +26,7 @@ class DummyDataCountsReporter extends DataCountsReporter { int logStatusCallCount = 0; DummyDataCountsReporter() { - super(Settings.EMPTY, createJob(), new DataCounts("DummyJobId"), - mock(JobDataCountsPersister.class)); + super(createJob(), new DataCounts("DummyJobId"), mock(JobDataCountsPersister.class)); } /** diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessTests.java index 6d5adeb3fdb..18ee9434f0d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.ml.job.process.autodetect; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutodetectResultsParser; @@ -55,7 +54,7 @@ public class NativeAutodetectProcessTests extends ESTestCase { try (NativeAutodetectProcess process = new NativeAutodetectProcess("foo", logStream, mock(OutputStream.class), mock(InputStream.class), mock(OutputStream.class), NUMBER_FIELDS, null, - new AutodetectResultsParser(Settings.EMPTY), mock(Runnable.class))) { + new AutodetectResultsParser(), mock(Runnable.class))) { process.start(executorService, mock(AutodetectStateProcessor.class), mock(InputStream.class)); ZonedDateTime startTime = process.getProcessStartTime(); @@ -75,7 +74,7 @@ public class NativeAutodetectProcessTests extends ESTestCase { ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); try (NativeAutodetectProcess process = new NativeAutodetectProcess("foo", logStream, bos, mock(InputStream.class), mock(OutputStream.class), NUMBER_FIELDS, Collections.emptyList(), - new AutodetectResultsParser(Settings.EMPTY), mock(Runnable.class))) { + new AutodetectResultsParser(), mock(Runnable.class))) { process.start(executorService, mock(AutodetectStateProcessor.class), mock(InputStream.class)); process.writeRecord(record); @@ -107,7 +106,7 @@ public class NativeAutodetectProcessTests extends ESTestCase { ByteArrayOutputStream bos = new 
ByteArrayOutputStream(ControlMsgToProcessWriter.FLUSH_SPACES_LENGTH + 1024); try (NativeAutodetectProcess process = new NativeAutodetectProcess("foo", logStream, bos, mock(InputStream.class), mock(OutputStream.class), NUMBER_FIELDS, Collections.emptyList(), - new AutodetectResultsParser(Settings.EMPTY), mock(Runnable.class))) { + new AutodetectResultsParser(), mock(Runnable.class))) { process.start(executorService, mock(AutodetectStateProcessor.class), mock(InputStream.class)); FlushJobParams params = FlushJobParams.builder().build(); @@ -137,7 +136,7 @@ public class NativeAutodetectProcessTests extends ESTestCase { ByteArrayOutputStream bos = new ByteArrayOutputStream(1024); try (NativeAutodetectProcess process = new NativeAutodetectProcess("foo", logStream, bos, mock(InputStream.class), mock(OutputStream.class), NUMBER_FIELDS, Collections.emptyList(), - new AutodetectResultsParser(Settings.EMPTY), mock(Runnable.class))) { + new AutodetectResultsParser(), mock(Runnable.class))) { process.start(executorService, mock(AutodetectStateProcessor.class), mock(InputStream.class)); writeFunction.accept(process); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java index d2356a79677..8a725b95abf 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.job.process.autodetect.output; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; @@ -238,7 +237,7 @@ public class AutodetectResultsParserTests extends ESTestCase { public void testParser() throws IOException { InputStream inputStream = new ByteArrayInputStream(METRIC_OUTPUT_SAMPLE.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new AutodetectResultsParser(); List results = new ArrayList<>(); parser.parseResults(inputStream).forEachRemaining(results::add); List buckets = results.stream().map(AutodetectResult::getBucket) @@ -331,7 +330,7 @@ public class AutodetectResultsParserTests extends ESTestCase { @AwaitsFix(bugUrl = "rewrite this test so it doesn't use ~200 lines of json") public void testPopulationParser() throws IOException { InputStream inputStream = new ByteArrayInputStream(POPULATION_OUTPUT_SAMPLE.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new AutodetectResultsParser(); List results = new ArrayList<>(); parser.parseResults(inputStream).forEachRemaining(results::add); List buckets = results.stream().map(AutodetectResult::getBucket) @@ -357,7 +356,7 @@ public class AutodetectResultsParserTests extends ESTestCase { public void testParse_GivenEmptyArray() throws ElasticsearchParseException, IOException { String json = "[]"; InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new AutodetectResultsParser(); assertFalse(parser.parseResults(inputStream).hasNext()); } @@ -365,7 +364,7 @@ public class AutodetectResultsParserTests extends ESTestCase { String json = "[{\"model_size_stats\": {\"job_id\": \"foo\", \"model_bytes\":300}}]"; InputStream inputStream = new 
ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new AutodetectResultsParser(); List results = new ArrayList<>(); parser.parseResults(inputStream).forEachRemaining(results::add); @@ -376,7 +375,7 @@ public class AutodetectResultsParserTests extends ESTestCase { public void testParse_GivenCategoryDefinition() throws IOException { String json = "[{\"category_definition\": {\"job_id\":\"foo\", \"category_id\":18}}]"; InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new AutodetectResultsParser(); List results = new ArrayList<>(); parser.parseResults(inputStream).forEachRemaining(results::add); @@ -387,7 +386,7 @@ public class AutodetectResultsParserTests extends ESTestCase { public void testParse_GivenUnknownObject() throws ElasticsearchParseException, IOException { String json = "[{\"unknown\":{\"id\": 18}}]"; InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new AutodetectResultsParser(); XContentParseException e = expectThrows(XContentParseException.class, () -> parser.parseResults(inputStream).forEachRemaining(a -> {})); assertEquals("[1:3] [autodetect_result] unknown field [unknown], parser not found", e.getMessage()); @@ -396,7 +395,7 @@ public class AutodetectResultsParserTests extends ESTestCase { public void testParse_GivenArrayContainsAnotherArray() throws ElasticsearchParseException, IOException { String json = "[[]]"; InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new 
AutodetectResultsParser(); ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> parser.parseResults(inputStream).forEachRemaining(a -> {})); assertEquals("unexpected token [START_ARRAY]", e.getMessage()); @@ -411,7 +410,7 @@ public class AutodetectResultsParserTests extends ESTestCase { + "\"by_field_name\":\"airline\",\"by_field_value\":\"JZA\", \"typical\":[1020.08],\"actual\":[0]," + "\"field_name\":\"responsetime\",\"function\":\"max\",\"partition_field_name\":\"\",\"partition_field_value\":\"\"}]}}]"; InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); - AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); + AutodetectResultsParser parser = new AutodetectResultsParser(); expectThrows(XContentParseException.class, () -> parser.parseResults(inputStream).forEachRemaining(a -> {})); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 641e25eabf9..2913a0ec9c0 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -137,11 +137,11 @@ public class Monitoring extends Plugin implements ActionPlugin { threadPool.getThreadContext()); Set collectors = new HashSet<>(); - collectors.add(new IndexStatsCollector(settings, clusterService, getLicenseState(), client)); + collectors.add(new IndexStatsCollector(clusterService, getLicenseState(), client)); collectors.add(new ClusterStatsCollector(settings, clusterService, getLicenseState(), client, getLicenseService())); - collectors.add(new ShardsCollector(settings, clusterService, getLicenseState())); - collectors.add(new NodeStatsCollector(settings, clusterService, getLicenseState(), client)); - collectors.add(new 
IndexRecoveryCollector(settings, clusterService, getLicenseState(), client)); + collectors.add(new ShardsCollector(clusterService, getLicenseState())); + collectors.add(new NodeStatsCollector(clusterService, getLicenseState(), client)); + collectors.add(new IndexRecoveryCollector(clusterService, getLicenseState(), client)); collectors.add(new JobStatsCollector(settings, clusterService, getLicenseState(), client)); collectors.add(new StatsCollector(settings, clusterService, getLicenseState(), client)); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java index b03ce2001b7..2677e8eb489 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.XPackField; @@ -48,9 +47,8 @@ public abstract class Collector extends AbstractComponent { protected final ClusterService clusterService; protected final XPackLicenseState licenseState; - public Collector(final Settings settings, final String name, final ClusterService clusterService, + public Collector(final String name, final ClusterService clusterService, final Setting timeoutSetting, final XPackLicenseState licenseState) { - super(settings); this.name = name; this.clusterService = clusterService; this.collectionTimeoutSetting = timeoutSetting; diff --git 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/StatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/StatsCollector.java index 948e4eb8848..bdccb5604a3 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/StatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/StatsCollector.java @@ -54,7 +54,7 @@ public final class StatsCollector extends Collector { final XPackLicenseState licenseState, final CcrClient ccrClient, final ThreadContext threadContext) { - super(settings, TYPE, clusterService, CCR_STATS_TIMEOUT, licenseState); + super(TYPE, clusterService, CCR_STATS_TIMEOUT, licenseState); this.settings = settings; this.ccrClient = ccrClient; this.threadContext = threadContext; diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java index efe7c8ba81a..ac699759482 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java @@ -65,7 +65,7 @@ public class ClusterStatsCollector extends Collector { final XPackLicenseState licenseState, final Client client, final LicenseService licenseService) { - this(settings, clusterService, licenseState, client, licenseService, new IndexNameExpressionResolver(Settings.EMPTY)); + this(settings, clusterService, licenseState, client, licenseService, new IndexNameExpressionResolver()); } ClusterStatsCollector(final Settings settings, @@ -74,7 +74,7 @@ public class ClusterStatsCollector extends Collector { final Client client, final LicenseService 
licenseService, final IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, ClusterStatsMonitoringDoc.TYPE, clusterService, CLUSTER_STATS_TIMEOUT, licenseState); + super(ClusterStatsMonitoringDoc.TYPE, clusterService, CLUSTER_STATS_TIMEOUT, licenseState); this.settings = settings; this.client = client; this.licenseService = licenseService; diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java index ea8b5065387..82d2176d415 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java @@ -11,7 +11,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; @@ -46,12 +45,10 @@ public class IndexRecoveryCollector extends Collector { private final Client client; - public IndexRecoveryCollector(final Settings settings, - final ClusterService clusterService, + public IndexRecoveryCollector(final ClusterService clusterService, final XPackLicenseState licenseState, final Client client) { - - super(settings, IndexRecoveryMonitoringDoc.TYPE, clusterService, INDEX_RECOVERY_TIMEOUT, licenseState); + super(IndexRecoveryMonitoringDoc.TYPE, clusterService, INDEX_RECOVERY_TIMEOUT, licenseState); this.client = Objects.requireNonNull(client); } diff --git 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java index b6f50ed9e5c..7630aa4794b 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; @@ -40,11 +39,10 @@ public class IndexStatsCollector extends Collector { private final Client client; - public IndexStatsCollector(final Settings settings, - final ClusterService clusterService, + public IndexStatsCollector(final ClusterService clusterService, final XPackLicenseState licenseState, final Client client) { - super(settings, "index-stats", clusterService, INDEX_STATS_TIMEOUT, licenseState); + super("index-stats", clusterService, INDEX_STATS_TIMEOUT, licenseState); this.client = client; } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java index 42f6b95a41a..8742e0b645f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java @@ -53,7 
+53,7 @@ public class JobStatsCollector extends Collector { JobStatsCollector(final Settings settings, final ClusterService clusterService, final XPackLicenseState licenseState, final MachineLearningClient client, final ThreadContext threadContext) { - super(settings, JobStatsMonitoringDoc.TYPE, clusterService, JOB_STATS_TIMEOUT, licenseState); + super(JobStatsMonitoringDoc.TYPE, clusterService, JOB_STATS_TIMEOUT, licenseState); this.settings = settings; this.client = client; this.threadContext = threadContext; diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java index c990485e6a5..bc816cb9d9a 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java @@ -14,7 +14,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; @@ -49,12 +48,10 @@ public class NodeStatsCollector extends Collector { private final Client client; - public NodeStatsCollector(final Settings settings, - final ClusterService clusterService, + public NodeStatsCollector(final ClusterService clusterService, final XPackLicenseState licenseState, final Client client) { - - super(settings, NodeStatsMonitoringDoc.TYPE, clusterService, NODE_STATS_TIMEOUT, licenseState); + super(NodeStatsMonitoringDoc.TYPE, clusterService, NODE_STATS_TIMEOUT, licenseState); this.client = 
Objects.requireNonNull(client); } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java index b5a3a2920e2..eda782913ad 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.monitoring.collector.Collector; @@ -30,11 +29,9 @@ import java.util.List; */ public class ShardsCollector extends Collector { - public ShardsCollector(final Settings settings, - final ClusterService clusterService, + public ShardsCollector(final ClusterService clusterService, final XPackLicenseState licenseState) { - - super(settings, ShardMonitoringDoc.TYPE, clusterService, null, licenseState); + super(ShardMonitoringDoc.TYPE, clusterService, null, licenseState); } @Override diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java index f4484aa5ed7..4e251711298 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.recovery.RecoveryState; @@ -55,7 +54,7 @@ public class IndexRecoveryCollectorTests extends BaseCollectorTestCase { final boolean isElectedMaster = randomBoolean(); whenLocalNodeElectedMaster(isElectedMaster); - final IndexRecoveryCollector collector = new IndexRecoveryCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexRecoveryCollector collector = new IndexRecoveryCollector(clusterService, licenseState, client); assertThat(collector.shouldCollect(isElectedMaster), is(false)); if (isElectedMaster) { @@ -65,14 +64,14 @@ public class IndexRecoveryCollectorTests extends BaseCollectorTestCase { public void testShouldCollectReturnsFalseIfNotMaster() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - final IndexRecoveryCollector collector = new IndexRecoveryCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexRecoveryCollector collector = new IndexRecoveryCollector(clusterService, licenseState, client); assertThat(collector.shouldCollect(false), is(false)); } public void testShouldCollectReturnsTrue() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - final IndexRecoveryCollector collector = new IndexRecoveryCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexRecoveryCollector collector = new IndexRecoveryCollector(clusterService, licenseState, client); assertThat(collector.shouldCollect(true), is(true)); verify(licenseState).isMonitoringAllowed(); 
@@ -138,7 +137,7 @@ public class IndexRecoveryCollectorTests extends BaseCollectorTestCase { final Client client = mock(Client.class); when(client.admin()).thenReturn(adminClient); - final IndexRecoveryCollector collector = new IndexRecoveryCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexRecoveryCollector collector = new IndexRecoveryCollector(clusterService, licenseState, client); assertEquals(timeout, collector.getCollectionTimeout()); assertEquals(recoveryOnly, collector.getActiveRecoveriesOnly()); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java index ef96726aaee..c41ce1ec913 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; @@ -50,7 +49,7 @@ public class IndexStatsCollectorTests extends BaseCollectorTestCase { final boolean isElectedMaster = randomBoolean(); whenLocalNodeElectedMaster(isElectedMaster); - final IndexStatsCollector collector = new IndexStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexStatsCollector collector = new IndexStatsCollector(clusterService, licenseState, client); 
assertThat(collector.shouldCollect(isElectedMaster), is(false)); if (isElectedMaster) { @@ -60,14 +59,14 @@ public class IndexStatsCollectorTests extends BaseCollectorTestCase { public void testShouldCollectReturnsFalseIfNotMaster() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - final IndexStatsCollector collector = new IndexStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexStatsCollector collector = new IndexStatsCollector(clusterService, licenseState, client); assertThat(collector.shouldCollect(false), is(false)); } public void testShouldCollectReturnsTrue() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - final IndexStatsCollector collector = new IndexStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexStatsCollector collector = new IndexStatsCollector(clusterService, licenseState, client); assertThat(collector.shouldCollect(true), is(true)); verify(licenseState).isMonitoringAllowed(); @@ -144,7 +143,7 @@ public class IndexStatsCollectorTests extends BaseCollectorTestCase { final Client client = mock(Client.class); when(client.admin()).thenReturn(adminClient); - final IndexStatsCollector collector = new IndexStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final IndexStatsCollector collector = new IndexStatsCollector(clusterService, licenseState, client); assertEquals(timeout, collector.getCollectionTimeout()); final long interval = randomNonNegativeLong(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java index 03692cc9d53..4aca7fcdc60 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; import org.elasticsearch.client.ClusterAdminClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; @@ -43,7 +42,7 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase { final boolean isElectedMaster = randomBoolean(); whenLocalNodeElectedMaster(isElectedMaster); - final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final NodeStatsCollector collector = new NodeStatsCollector(clusterService, licenseState, client); assertThat(collector.shouldCollect(isElectedMaster), is(false)); if (isElectedMaster) { @@ -55,7 +54,7 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase { when(licenseState.isMonitoringAllowed()).thenReturn(true); final boolean isElectedMaster = true; - final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final NodeStatsCollector collector = new NodeStatsCollector(clusterService, licenseState, client); assertThat(collector.shouldCollect(isElectedMaster), is(true)); verify(licenseState).isMonitoringAllowed(); @@ -76,7 +75,7 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase { final Client client = mock(Client.class); thenReturnNodeStats(client, timeout, nodesStatsResponse); - final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final NodeStatsCollector collector = new NodeStatsCollector(clusterService, licenseState, client); assertEquals(timeout, 
collector.getCollectionTimeout()); final FailedNodeException e = expectThrows(FailedNodeException.class, () -> @@ -110,7 +109,7 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase { final Client client = mock(Client.class); thenReturnNodeStats(client, timeout, nodesStatsResponse); - final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client); + final NodeStatsCollector collector = new NodeStatsCollector(clusterService, licenseState, client); assertEquals(timeout, collector.getCollectionTimeout()); final long interval = randomNonNegativeLong(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java index 4affc3a1643..83c09800214 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; @@ -48,7 +47,7 @@ public class ShardsCollectorTests extends BaseCollectorTestCase { final boolean isElectedMaster = randomBoolean(); whenLocalNodeElectedMaster(isElectedMaster); - final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState); + final ShardsCollector collector = new ShardsCollector(clusterService, licenseState); 
assertThat(collector.shouldCollect(isElectedMaster), is(false)); if (isElectedMaster) { @@ -61,7 +60,7 @@ public class ShardsCollectorTests extends BaseCollectorTestCase { // this controls the blockage whenLocalNodeElectedMaster(false); - final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState); + final ShardsCollector collector = new ShardsCollector(clusterService, licenseState); assertThat(collector.shouldCollect(false), is(false)); } @@ -70,14 +69,14 @@ public class ShardsCollectorTests extends BaseCollectorTestCase { when(licenseState.isMonitoringAllowed()).thenReturn(true); whenLocalNodeElectedMaster(true); - final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState); + final ShardsCollector collector = new ShardsCollector(clusterService, licenseState); assertThat(collector.shouldCollect(true), is(true)); verify(licenseState).isMonitoringAllowed(); } public void testDoCollectWhenNoClusterState() throws Exception { - final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState); + final ShardsCollector collector = new ShardsCollector(clusterService, licenseState); final Collection results = collector.doCollect(randomMonitoringNode(random()), randomNonNegativeLong(), null); assertThat(results, notNullValue()); @@ -105,7 +104,7 @@ public class ShardsCollectorTests extends BaseCollectorTestCase { when(nodes.get(eq("_current"))).thenReturn(localNode); when(clusterState.getNodes()).thenReturn(nodes); - final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState); + final ShardsCollector collector = new ShardsCollector(clusterService, licenseState); assertNull(collector.getCollectionTimeout()); assertArrayEquals(indices, collector.getCollectionIndices()); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java index 09b2ccd079a..62bb18ee033 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java @@ -195,7 +195,7 @@ public class Rollup extends Plugin implements ActionPlugin, PersistentTaskPlugin } SchedulerEngine schedulerEngine = new SchedulerEngine(settings, getClock()); - return Collections.singletonList(new RollupJobTask.RollupJobPersistentTasksExecutor(settings, client, schedulerEngine, threadPool)); + return Collections.singletonList(new RollupJobTask.RollupJobPersistentTasksExecutor(client, schedulerEngine, threadPool)); } // overridable by tests diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java index 518fe7ec29f..b3348a08a7b 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java @@ -46,7 +46,7 @@ public class TransportGetRollupIndexCapsAction extends HandledTransportAction listener) { - IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(clusterService.getSettings()); + IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(); String[] indices = resolver.concreteIndexNames(clusterService.state(), request.indicesOptions(), request.indices()); Map allCaps = getCapsByRollupIndex(Arrays.asList(indices), diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 70660a8bfb4..20c8e17c030 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -101,7 +101,7 @@ public class TransportRollupSearchAction extends TransportAction listener) { - IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(clusterService.getSettings()); + IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(); String[] indices = resolver.concreteIndexNames(clusterService.state(), request.indicesOptions(), request.indices()); RollupSearchContext rollupSearchContext = separateIndices(indices, clusterService.state().getMetaData().indices()); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index 3fbe77b64b4..f545ab049d4 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; @@ -56,8 +55,8 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; - public RollupJobPersistentTasksExecutor(Settings settings, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { - super(settings, RollupField.TASK_NAME, Rollup.TASK_THREAD_POOL_NAME); + public 
RollupJobPersistentTasksExecutor(Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { + super(RollupField.TASK_NAME, Rollup.TASK_THREAD_POOL_NAME); this.client = client; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index b77b17ff602..b0c1acba7e2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -344,7 +344,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw b.bind(CompositeRolesStore.class).toProvider(Providers.of(null)); // for SecurityFeatureSet b.bind(NativeRoleMappingStore.class).toProvider(Providers.of(null)); // for SecurityFeatureSet b.bind(AuditTrailService.class) - .toInstance(new AuditTrailService(settings, Collections.emptyList(), getLicenseState())); + .toInstance(new AuditTrailService(Collections.emptyList(), getLicenseState())); }); return modules; } @@ -415,7 +415,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw } } final AuditTrailService auditTrailService = - new AuditTrailService(settings, new ArrayList<>(auditTrails), getLicenseState()); + new AuditTrailService(new ArrayList<>(auditTrails), getLicenseState()); components.add(auditTrailService); this.auditTrailService.set(auditTrailService); @@ -487,16 +487,16 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw final Set requestInterceptors; if (XPackSettings.DLS_FLS_ENABLED.get(settings)) { requestInterceptors = Collections.unmodifiableSet(Sets.newHashSet( - new SearchRequestInterceptor(settings, threadPool, getLicenseState()), - new UpdateRequestInterceptor(settings, threadPool, getLicenseState()), - new 
BulkShardRequestInterceptor(settings, threadPool, getLicenseState()), - new ResizeRequestInterceptor(settings, threadPool, getLicenseState(), auditTrailService), + new SearchRequestInterceptor(threadPool, getLicenseState()), + new UpdateRequestInterceptor(threadPool, getLicenseState()), + new BulkShardRequestInterceptor(threadPool, getLicenseState()), + new ResizeRequestInterceptor(threadPool, getLicenseState(), auditTrailService), new IndicesAliasesRequestInterceptor(threadPool.getThreadContext(), getLicenseState(), auditTrailService))); } else { requestInterceptors = Collections.emptySet(); } - securityActionFilter.set(new SecurityActionFilter(settings, authcService.get(), authzService, getLicenseState(), + securityActionFilter.set(new SecurityActionFilter(authcService.get(), authzService, getLicenseState(), requestInterceptors, threadPool, securityContext.get(), destructiveOperations)); return components; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index 3e1f9f97c2f..8252c7f9a9d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; @@ -55,10 +54,9 @@ public class SecurityActionFilter extends AbstractComponent 
implements ActionFil private final SecurityContext securityContext; private final DestructiveOperations destructiveOperations; - public SecurityActionFilter(Settings settings, AuthenticationService authcService, AuthorizationService authzService, + public SecurityActionFilter(AuthenticationService authcService, AuthorizationService authzService, XPackLicenseState licenseState, Set requestInterceptors, ThreadPool threadPool, SecurityContext securityContext, DestructiveOperations destructiveOperations) { - super(settings); this.authcService = authcService; this.authzService = authzService; this.licenseState = licenseState; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/BulkShardRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/BulkShardRequestInterceptor.java index abdaba7cf29..5732e187377 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/BulkShardRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/BulkShardRequestInterceptor.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.bulk.BulkItemRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; @@ -29,8 +28,7 @@ public class BulkShardRequestInterceptor extends AbstractComponent implements Re private final ThreadContext threadContext; private final XPackLicenseState licenseState; - public BulkShardRequestInterceptor(Settings settings, ThreadPool threadPool, XPackLicenseState licenseState) { - super(settings); + public 
BulkShardRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) { this.threadContext = threadPool.getThreadContext(); this.licenseState = licenseState; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java index 5f6f4d1643b..fb8575ce130 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.security.action.interceptor; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -25,9 +24,7 @@ abstract class FieldAndDocumentLevelSecurityRequestInterceptor { - public SearchRequestInterceptor(Settings settings, ThreadPool threadPool, XPackLicenseState licenseState) { - super(settings, threadPool.getThreadContext(), licenseState); + public SearchRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) { + super(threadPool.getThreadContext(), licenseState); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/UpdateRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/UpdateRequestInterceptor.java index 40b63d943d8..db265333e69 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/UpdateRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/UpdateRequestInterceptor.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.security.action.interceptor; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; @@ -22,8 +21,8 @@ import org.elasticsearch.transport.TransportRequest; */ public class UpdateRequestInterceptor extends FieldAndDocumentLevelSecurityRequestInterceptor { - public UpdateRequestInterceptor(Settings settings, ThreadPool threadPool, XPackLicenseState licenseState) { - super(settings, threadPool.getThreadContext(), licenseState); + public UpdateRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) { + super(threadPool.getThreadContext(), licenseState); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java index e36dee3d67c..aa861ce6857 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.audit; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.transport.TransportMessage; @@ -29,8 +28,7 @@ public class AuditTrailService extends AbstractComponent 
implements AuditTrail { return "service"; } - public AuditTrailService(Settings settings, List auditTrails, XPackLicenseState licenseState) { - super(settings); + public AuditTrailService(List auditTrails, XPackLicenseState licenseState) { this.auditTrails = Collections.unmodifiableList(auditTrails); this.licenseState = licenseState; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java index c20d73c84a0..9bb0f0697ac 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrail.java @@ -186,7 +186,6 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl } public IndexAuditTrail(Settings settings, Client client, ThreadPool threadPool, ClusterService clusterService) { - super(settings); this.settings = settings; this.threadPool = threadPool; this.clusterService = clusterService; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index 05665d2f6a0..dabe62c9bc0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -168,7 +168,6 @@ public class LoggingAuditTrail extends AbstractComponent implements AuditTrail, } LoggingAuditTrail(Settings settings, ClusterService clusterService, Logger logger, ThreadContext threadContext) { - super(settings); this.logger = logger; this.events = parse(INCLUDE_EVENT_SETTINGS.get(settings), EXCLUDE_EVENT_SETTINGS.get(settings)); 
this.includeRequestBody = INCLUDE_REQUEST_BODY.get(settings); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index d5242fab45f..cdeb44608d4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -64,7 +64,6 @@ public class AuthenticationService extends AbstractComponent { public AuthenticationService(Settings settings, Realms realms, AuditTrailService auditTrail, AuthenticationFailureHandler failureHandler, ThreadPool threadPool, AnonymousUser anonymousUser, TokenService tokenService) { - super(settings); this.nodeName = Node.NODE_NAME_SETTING.get(settings); this.realms = realms; this.auditTrail = auditTrail; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java index ea086ba57e5..c090e86fe7a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java @@ -59,7 +59,6 @@ public class Realms extends AbstractComponent implements Iterable { public Realms(Settings settings, Environment env, Map factories, XPackLicenseState licenseState, ThreadContext threadContext, ReservedRealm reservedRealm) throws Exception { - super(settings); this.settings = settings; this.env = env; this.factories = factories; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index ff9ee0fc4b8..3ce0906a4e2 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -184,7 +184,6 @@ public final class TokenService extends AbstractComponent { */ public TokenService(Settings settings, Clock clock, Client client, SecurityIndexManager securityIndex, ClusterService clusterService) throws GeneralSecurityException { - super(settings); byte[] saltArr = new byte[SALT_BYTES]; secureRandom.nextBytes(saltArr); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 508b240afef..1efc903dfc7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -88,7 +88,6 @@ public class NativeUsersStore extends AbstractComponent { private final SecurityIndexManager securityIndex; public NativeUsersStore(Settings settings, Client client, SecurityIndexManager securityIndex) { - super(settings); this.settings = settings; this.client = client; this.securityIndex = securityIndex; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 027346eadfe..d3d64116032 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -101,7 +101,6 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol 
private final List realmsToRefresh = new CopyOnWriteArrayList<>(); public NativeRoleMappingStore(Settings settings, Client client, SecurityIndexManager securityIndex) { - super(settings); this.settings = settings; this.client = client; this.securityIndex = securityIndex; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 8db69ff4e47..60c6b7d48fe 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -108,7 +108,6 @@ public class AuthorizationService extends AbstractComponent { public AuthorizationService(Settings settings, CompositeRolesStore rolesStore, ClusterService clusterService, AuditTrailService auditTrail, AuthenticationFailureHandler authcFailureHandler, ThreadPool threadPool, AnonymousUser anonymousUser) { - super(settings); this.rolesStore = rolesStore; this.clusterService = clusterService; this.auditTrail = auditTrail; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 26fa8405ccf..cbd0d7ca184 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -54,7 +54,7 @@ class IndicesAndAliasesResolver { private final RemoteClusterResolver remoteClusterResolver; IndicesAndAliasesResolver(Settings settings, ClusterService clusterService) { - this.nameExpressionResolver = new IndexNameExpressionResolver(settings); + this.nameExpressionResolver = new 
IndexNameExpressionResolver(); this.remoteClusterResolver = new RemoteClusterResolver(settings, clusterService.getClusterSettings()); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index a7a73397d50..139393b522e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -99,7 +99,6 @@ public class CompositeRolesStore extends AbstractComponent { ReservedRolesStore reservedRolesStore, NativePrivilegeStore privilegeStore, List, ActionListener>> rolesProviders, ThreadContext threadContext, XPackLicenseState licenseState) { - super(settings); this.fileRolesStore = fileRolesStore; fileRolesStore.addListener(this::invalidate); this.nativeRolesStore = nativeRolesStore; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 83b184fce77..60801274a5e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -71,7 +71,6 @@ public class FileRolesStore extends AbstractComponent implements BiConsumer> listener, XPackLicenseState licenseState) throws IOException { - super(settings); this.settings = settings; this.file = resolveFile(env); if (listener != null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 
e0b9ff7b443..66340d0edf3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -81,7 +81,6 @@ public class NativePrivilegeStore extends AbstractComponent { private final SecurityIndexManager securityIndexManager; public NativePrivilegeStore(Settings settings, Client client, SecurityIndexManager securityIndexManager) { - super(settings); this.settings = settings; this.client = client; this.securityClient = new SecurityClient(client); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 2c850bc5597..c753c41012f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -97,7 +97,6 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer(), threadPool, securityContext, destructiveOperations); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptorTests.java index a5798be9746..bf7ab102c2a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptorTests.java @@ -40,7 +40,7 @@ public class IndicesAliasesRequestInterceptorTests extends ESTestCase { when(licenseState.isAuditingAllowed()).thenReturn(true); 
when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - AuditTrailService auditTrailService = new AuditTrailService(Settings.EMPTY, Collections.emptyList(), licenseState); + AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); Authentication authentication = new Authentication(new User("john", "role"), new RealmRef(null, null, null), new RealmRef(null, null, null)); final FieldPermissions fieldPermissions; @@ -87,7 +87,7 @@ public class IndicesAliasesRequestInterceptorTests extends ESTestCase { when(licenseState.isAuditingAllowed()).thenReturn(true); when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - AuditTrailService auditTrailService = new AuditTrailService(Settings.EMPTY, Collections.emptyList(), licenseState); + AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); Authentication authentication = new Authentication(new User("john", "role"), new RealmRef(null, null, null), new RealmRef(null, null, null)); Role role = Role.builder() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptorTests.java index 008928794db..e956ad5e031 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptorTests.java @@ -44,7 +44,7 @@ public class ResizeRequestInterceptorTests extends ESTestCase { ThreadPool threadPool = mock(ThreadPool.class); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); 
when(threadPool.getThreadContext()).thenReturn(threadContext); - AuditTrailService auditTrailService = new AuditTrailService(Settings.EMPTY, Collections.emptyList(), licenseState); + AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); final Authentication authentication = new Authentication(new User("john", "role"), new RealmRef(null, null, null), null); final FieldPermissions fieldPermissions; final boolean useFls = randomBoolean(); @@ -67,7 +67,7 @@ public class ResizeRequestInterceptorTests extends ESTestCase { threadContext.putTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, accessControl); ResizeRequestInterceptor resizeRequestInterceptor = - new ResizeRequestInterceptor(Settings.EMPTY, threadPool, licenseState, auditTrailService); + new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> resizeRequestInterceptor.intercept(new ResizeRequest("bar", "foo"), authentication, role, action)); @@ -84,7 +84,7 @@ public class ResizeRequestInterceptorTests extends ESTestCase { ThreadPool threadPool = mock(ThreadPool.class); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); when(threadPool.getThreadContext()).thenReturn(threadContext); - AuditTrailService auditTrailService = new AuditTrailService(Settings.EMPTY, Collections.emptyList(), licenseState); + AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); final Authentication authentication = new Authentication(new User("john", "role"), new RealmRef(null, null, null), null); Role role = Role.builder() .add(IndexPrivilege.ALL, "target") @@ -94,7 +94,7 @@ public class ResizeRequestInterceptorTests extends ESTestCase { IndicesAccessControl accessControl = new IndicesAccessControl(true, Collections.emptyMap()); 
threadContext.putTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, accessControl); ResizeRequestInterceptor resizeRequestInterceptor = - new ResizeRequestInterceptor(Settings.EMPTY, threadPool, licenseState, auditTrailService); + new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> resizeRequestInterceptor.intercept(new ResizeRequest("target", "source"), authentication, role, action)); assertEquals("Resizing an index is not allowed when the target index has more permissions than the source index", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java index 13a7e5c3cf7..6ce5ad0ccfa 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.audit; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; @@ -46,7 +45,7 @@ public class AuditTrailServiceTests extends ESTestCase { } auditTrails = unmodifiableList(auditTrailsBuilder); licenseState = mock(XPackLicenseState.class); - service = new AuditTrailService(Settings.EMPTY, auditTrails, licenseState); + service = new AuditTrailService(auditTrails, licenseState); isAuditingAllowed = randomBoolean(); when(licenseState.isAuditingAllowed()).thenReturn(isAuditingAllowed); token = mock(AuthenticationToken.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 3e7543ffd99..31b9551b903 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -117,7 +117,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { .put("cluster.remote.other_remote.seeds", "127.0.0.1:" + randomIntBetween(9351, 9399)) .build(); - indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); + indexNameExpressionResolver = new IndexNameExpressionResolver(); final boolean withAlias = randomBoolean(); final String securityIndexName = SECURITY_INDEX_NAME + (withAlias ? "-" + randomAlphaOfLength(5) : ""); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java index ee40d3e24bb..f17e9f4ff08 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java @@ -57,7 +57,7 @@ public class IpFilterRemoteAddressFilterTests extends ESTestCase { IPFilter.PROFILE_FILTER_DENY_SETTING))); XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isIpFilteringAllowed()).thenReturn(true); - AuditTrailService auditTrailService = new AuditTrailService(settings, Collections.emptyList(), licenseState); + AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); IPFilter ipFilter = new IPFilter(settings, auditTrailService, clusterSettings, licenseState); 
ipFilter.setBoundTransportAddress(transport.boundAddress(), transport.profileBoundAddresses()); if (isHttpEnabled) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java index 398b783f642..3df00018af4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java @@ -58,7 +58,7 @@ public class NioIPFilterTests extends ESTestCase { IPFilter.PROFILE_FILTER_DENY_SETTING))); XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isIpFilteringAllowed()).thenReturn(true); - AuditTrailService auditTrailService = new AuditTrailService(settings, Collections.emptyList(), licenseState); + AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); IPFilter ipFilter = new IPFilter(settings, auditTrailService, clusterSettings, licenseState); ipFilter.setBoundTransportAddress(transport.boundAddress(), transport.profileBoundAddresses()); if (isHttpEnabled) { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java index 37180db15d5..03267329aa3 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/cli/ErrorsTestCase.java @@ -27,7 +27,7 @@ public abstract class ErrorsTestCase extends CliIntegrationTestCase implements o @Override public void testSelectInvalidSql() throws Exception { assertFoundOneProblem(command("SELECT * FRO")); - assertEquals("line 1:8: Cannot determine columns for *" + END, readLine()); + assertEquals("line 
1:8: Cannot determine columns for [*]" + END, readLine()); } @Override diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java index b32e1ed78ef..bb9d5f2c2fc 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ErrorsTestCase.java @@ -20,7 +20,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast public void testSelectInvalidSql() throws Exception { try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FRO").executeQuery()); - assertEquals("Found 1 problem(s)\nline 1:8: Cannot determine columns for *", e.getMessage()); + assertEquals("Found 1 problem(s)\nline 1:8: Cannot determine columns for [*]", e.getMessage()); } } diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index 73fa3ef1d77..7287784089f 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -202,7 +202,7 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe @Override public void testSelectInvalidSql() { String mode = randomFrom("jdbc", "plain"); - expectBadRequest(() -> runSql(mode, "SELECT * FRO"), containsString("1:8: Cannot determine columns for *")); + expectBadRequest(() -> runSql(mode, "SELECT * FRO"), containsString("1:8: Cannot determine columns for [*]")); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index 9876718d4d2..b376e38e40b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -380,7 +380,13 @@ public class Analyzer extends RuleExecutor { List output = child.output(); for (NamedExpression ne : projections) { if (ne instanceof UnresolvedStar) { - result.addAll(expandStar((UnresolvedStar) ne, output)); + List expanded = expandStar((UnresolvedStar) ne, output); + // the field exists, but cannot be expanded (no sub-fields) + if (expanded.isEmpty()) { + result.add(ne); + } else { + result.addAll(expanded); + } } else if (ne instanceof UnresolvedAlias) { UnresolvedAlias ua = (UnresolvedAlias) ne; if (ua.child() instanceof UnresolvedStar) { @@ -403,6 +409,13 @@ public class Analyzer extends RuleExecutor { // since this is an unresolved start we don't know whether it's a path or an actual qualifier Attribute q = resolveAgainstList(us.qualifier(), output); + // the wildcard couldn't be expanded because the field doesn't exist at all + // so, add to the list of expanded attributes its qualifier (the field without the wildcard) + // the qualifier will be unresolved and later used in the error message presented to the user + if (q == null) { + expanded.add(us.qualifier()); + return expanded; + } // now use the resolved 'qualifier' to match for (Attribute attr : output) { // filter the attributes that match based on their path diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index c8834240c6c..5394625d882 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -19,7 +19,7 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.Functions; import org.elasticsearch.xpack.sql.expression.function.Score; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.sql.expression.predicate.In; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.plan.logical.Aggregate; import org.elasticsearch.xpack.sql.plan.logical.Distinct; import org.elasticsearch.xpack.sql.plan.logical.Filter; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java index 948a5465efa..c9ef08eab5a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java @@ -66,12 +66,12 @@ public class UnresolvedStar extends UnresolvedNamedExpression { } private String message() { - return (qualifier() != null ? qualifier() + "." : "") + "*"; + return (qualifier() != null ? qualifier().qualifiedName() + "." 
: "") + "*"; } @Override public String unresolvedMessage() { - return "Cannot determine columns for " + message(); + return "Cannot determine columns for [" + message() + "]"; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java index ae35f9c760c..44970e76911 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java @@ -25,9 +25,14 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.ChainingProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.HitExtractorProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.predicate.IsNotNullProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.logical.BinaryLogicProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.logical.NotProcessor; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.UnaryArithmeticProcessor; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InProcessor; +import org.elasticsearch.xpack.sql.expression.predicate.regex.RegexProcessor; import java.util.ArrayList; import java.util.List; @@ -49,13 +54,23 @@ public final class Processors { entries.add(new Entry(Processor.class, CastProcessor.NAME, CastProcessor::new)); entries.add(new Entry(Processor.class, ChainingProcessor.NAME, ChainingProcessor::new)); - // 
comparators - entries.add(new Entry(Processor.class, BinaryComparisonProcessor.NAME, BinaryComparisonProcessor::new)); + // logical + entries.add(new Entry(Processor.class, BinaryLogicProcessor.NAME, BinaryLogicProcessor::new)); + entries.add(new Entry(Processor.class, NotProcessor.NAME, NotProcessor::new)); + // null + entries.add(new Entry(Processor.class, IsNotNullProcessor.NAME, IsNotNullProcessor::new)); // arithmetic entries.add(new Entry(Processor.class, BinaryArithmeticProcessor.NAME, BinaryArithmeticProcessor::new)); entries.add(new Entry(Processor.class, UnaryArithmeticProcessor.NAME, UnaryArithmeticProcessor::new)); entries.add(new Entry(Processor.class, BinaryMathProcessor.NAME, BinaryMathProcessor::new)); + // comparators + entries.add(new Entry(Processor.class, BinaryComparisonProcessor.NAME, BinaryComparisonProcessor::new)); + entries.add(new Entry(Processor.class, InProcessor.NAME, InProcessor::new)); + // regex + entries.add(new Entry(Processor.class, RegexProcessor.NAME, RegexProcessor::new)); + + // datetime entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new)); entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new)); @@ -73,4 +88,12 @@ public final class Processors { entries.add(new Entry(Processor.class, SubstringFunctionProcessor.NAME, SubstringFunctionProcessor::new)); return entries; } -} \ No newline at end of file + + public static List process(List processors, Object input) { + List values = new ArrayList<>(processors.size()); + for (Processor p : processors) { + values.add(p.process(input)); + } + return values; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index 9aabb3f10ec..1c2ccfeeb29 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -27,10 +27,12 @@ import org.elasticsearch.xpack.sql.expression.predicate.logical.NotProcessor; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.BinaryComparisonProcessor.BinaryComparisonOperation; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InProcessor; import org.elasticsearch.xpack.sql.expression.predicate.regex.RegexProcessor.RegexOperation; import org.elasticsearch.xpack.sql.util.StringUtils; import java.time.ZonedDateTime; +import java.util.List; import java.util.Map; /** @@ -113,6 +115,10 @@ public final class InternalSqlScriptUtils { return IsNotNullProcessor.apply(expression); } + public static Boolean in(Object value, List values) { + return InProcessor.apply(value, values); + } + // // Regex // @@ -375,4 +381,4 @@ public final class InternalSqlScriptUtils { public static String ucase(String s) { return (String) StringOperation.UCASE.apply(s); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Scripts.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Scripts.java index f9e2588a9c0..21ac12e51da 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Scripts.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/Scripts.java @@ -87,4 +87,4 @@ public final class Scripts { .build(), dataType); } -} \ 
No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNullProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNullProcessor.java index b29ae263f39..a9bec52a859 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNullProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/IsNotNullProcessor.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression.predicate; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; @@ -18,6 +19,8 @@ public class IsNotNullProcessor implements Processor { private IsNotNullProcessor() {} + public IsNotNullProcessor(StreamInput in) throws IOException {} + @Override public String getWriteableName() { return NAME; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/NotProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/NotProcessor.java index 14425d35578..8c82f2ab3cc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/NotProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/logical/NotProcessor.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression.predicate.logical; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; @@ -19,6 +20,8 @@ public class NotProcessor implements Processor { private NotProcessor() {} + public NotProcessor(StreamInput in) throws IOException {} + @Override 
public String getWriteableName() { return NAME; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/In.java similarity index 64% rename from x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java rename to x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/In.java index 9b16b77511c..41cbeee9842 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/In.java @@ -3,20 +3,17 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; -import org.elasticsearch.xpack.sql.expression.gen.script.Params; -import org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptWeaver; -import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Comparisons; -import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InPipe; import 
org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; @@ -30,7 +27,6 @@ import java.util.Objects; import java.util.StringJoiner; import java.util.stream.Collectors; -import static java.lang.String.format; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public class In extends NamedExpression implements ScriptWeaver { @@ -84,24 +80,12 @@ public class In extends NamedExpression implements ScriptWeaver { @Override public Boolean fold() { - if (value.dataType() == DataType.NULL) { + // Optimization for early return and Query folding to LocalExec + if (value.dataType() == DataType.NULL || + list.size() == 1 && list.get(0).dataType() == DataType.NULL) { return null; } - if (list.size() == 1 && list.get(0).dataType() == DataType.NULL) { - return false; - } - - Object foldedLeftValue = value.fold(); - Boolean result = false; - for (Expression rightValue : list) { - Boolean compResult = Comparisons.eq(foldedLeftValue, rightValue.fold()); - if (compResult == null) { - result = null; - } else if (compResult) { - return true; - } - } - return result; + return InProcessor.apply(value.fold(), Foldables.valuesOf(list, value.dataType())); } @Override @@ -122,34 +106,18 @@ public class In extends NamedExpression implements ScriptWeaver { @Override public ScriptTemplate asScript() { - StringJoiner sj = new StringJoiner(" || "); ScriptTemplate leftScript = asScript(value); - List rightParams = new ArrayList<>(); - String scriptPrefix = leftScript + "=="; - LinkedHashSet values = list.stream().map(Expression::fold).collect(Collectors.toCollection(LinkedHashSet::new)); - for (Object valueFromList : values) { - // if checked against null => false - if (valueFromList != null) { - if (valueFromList instanceof Expression) { - ScriptTemplate rightScript = asScript((Expression) valueFromList); - sj.add(scriptPrefix + rightScript.template()); - 
rightParams.add(rightScript.params()); - } else { - if (valueFromList instanceof String) { - sj.add(scriptPrefix + '"' + valueFromList + '"'); - } else { - sj.add(scriptPrefix + valueFromList.toString()); - } - } - } - } + // remove duplicates + List values = new ArrayList<>(new LinkedHashSet<>(Foldables.valuesOf(list, value.dataType()))); + values.removeIf(Objects::isNull); - ParamsBuilder paramsBuilder = paramsBuilder().script(leftScript.params()); - for (Params p : rightParams) { - paramsBuilder = paramsBuilder.script(p); - } - - return new ScriptTemplate(format(Locale.ROOT, "%s", sj.toString()), paramsBuilder.build(), dataType()); + return new ScriptTemplate( + formatTemplate(String.format(Locale.ROOT, "{sql}.in(%s, {})", leftScript.template())), + paramsBuilder() + .script(leftScript.params()) + .variable(values) + .build(), + dataType()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java index 0a901b5b5e6..82233e250e3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; @@ -19,7 +20,7 @@ public class InProcessor implements Processor { private final List processsors; - public InProcessor(List processors) { + InProcessor(List processors) { this.processsors = processors; } @@ -40,14 
+41,17 @@ public class InProcessor implements Processor { @Override public Object process(Object input) { Object leftValue = processsors.get(processsors.size() - 1).process(input); - Boolean result = false; + return apply(leftValue, Processors.process(processsors.subList(0, processsors.size() - 1), leftValue)); + } - for (int i = 0; i < processsors.size() - 1; i++) { - Boolean compResult = Comparisons.eq(leftValue, processsors.get(i).process(input)); + public static Boolean apply(Object input, List values) { + Boolean result = Boolean.FALSE; + for (Object v : values) { + Boolean compResult = Comparisons.eq(input, v); if (compResult == null) { result = null; - } else if (compResult) { - return true; + } else if (compResult == Boolean.TRUE) { + return Boolean.TRUE; } } return result; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index e0e7ad83cad..e2b8d02589a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -39,7 +39,6 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttr import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator.Negateable; import org.elasticsearch.xpack.sql.expression.predicate.BinaryPredicate; -import org.elasticsearch.xpack.sql.expression.predicate.In; import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull; import org.elasticsearch.xpack.sql.expression.predicate.Predicates; import org.elasticsearch.xpack.sql.expression.predicate.Range; @@ -50,6 +49,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Bina import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Equals; import 
org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.sql.plan.logical.Aggregate; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 84bb213fb68..893f71c8bcb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.sql.expression.UnresolvedStar; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; -import org.elasticsearch.xpack.sql.expression.predicate.In; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull; import org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 3605898210f..c17c1311ccc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -30,7 +30,7 @@ import 
org.elasticsearch.xpack.sql.expression.gen.pipeline.AggPathInput; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.pipeline.UnaryPipe; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.expression.predicate.In; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.plan.physical.AggregateExec; import org.elasticsearch.xpack.sql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.sql.plan.physical.FilterExec; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 9fcd542ef63..4e0bdea88b8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -31,7 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; -import org.elasticsearch.xpack.sql.expression.predicate.In; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull; import org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java index 91ea49a8a3c..66d206f829a 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java @@ -9,7 +9,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; import java.util.Collections; import java.util.LinkedHashSet; @@ -27,7 +27,7 @@ public class TermsQuery extends LeafQuery { public TermsQuery(Location location, String term, List values) { super(location); this.term = term; - values.removeIf(e -> e.dataType() == DataType.NULL); + values.removeIf(e -> DataTypes.isNull(e.dataType())); if (values.isEmpty()) { this.values = Collections.emptySet(); } else { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index 3312f449ec6..53f7e6b1ab1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -105,6 +105,9 @@ public abstract class DataTypeConversion { if (to == DataType.NULL) { return Conversion.NULL; } + if (from == DataType.NULL) { + return Conversion.NULL; + } Conversion conversion = conversion(from, to); if (conversion == null) { diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 998dab84783..827947424b0 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ 
b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -24,6 +24,7 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS Boolean lte(Object, Object) Boolean gt(Object, Object) Boolean gte(Object, Object) + Boolean in(Object, java.util.List) # # Logical @@ -107,4 +108,4 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS String space(Number) String substring(String, Number, Number) String ucase(String) -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 512b2da732a..3b03079ca72 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -49,6 +49,38 @@ public class VerifierErrorMessagesTests extends ESTestCase { public void testMissingColumn() { assertEquals("1:8: Unknown column [xxx]", verify("SELECT xxx FROM test")); } + + public void testMissingColumnWithWildcard() { + assertEquals("1:8: Unknown column [xxx]", verify("SELECT xxx.* FROM test")); + } + + public void testMisspelledColumnWithWildcard() { + assertEquals("1:8: Unknown column [tex], did you mean [text]?", verify("SELECT tex.* FROM test")); + } + + public void testColumnWithNoSubFields() { + assertEquals("1:8: Cannot determine columns for [text.*]", verify("SELECT text.* FROM test")); + } + + public void testMultipleColumnsWithWildcard1() { + assertEquals("1:14: Unknown column [a]\n" + + "line 1:17: Unknown column [b]\n" + + "line 1:22: Unknown column [c]\n" + + "line 1:25: Unknown column [tex], did you mean [text]?", verify("SELECT bool, a, b.*, c, tex.* FROM test")); + } + + public void 
testMultipleColumnsWithWildcard2() { + assertEquals("1:8: Unknown column [tex], did you mean [text]?\n" + + "line 1:21: Unknown column [a]\n" + + "line 1:24: Unknown column [dat], did you mean [date]?\n" + + "line 1:31: Unknown column [c]", verify("SELECT tex.*, bool, a, dat.*, c FROM test")); + } + + public void testMultipleColumnsWithWildcard3() { + assertEquals("1:8: Unknown column [ate], did you mean [date]?\n" + + "line 1:21: Unknown column [keyw], did you mean [keyword]?\n" + + "line 1:29: Unknown column [da], did you mean [date]?" , verify("SELECT ate.*, bool, keyw.*, da FROM test")); + } public void testMisspelledColumn() { assertEquals("1:8: Unknown column [txt], did you mean [text]?", verify("SELECT txt FROM test")); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ProcessorTests.java new file mode 100644 index 00000000000..e3d51f1f7dd --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ProcessorTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.sql.expression; + +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; +import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.tree.NodeSubclassTests; +import org.junit.BeforeClass; + +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; + +import static java.util.stream.Collectors.toCollection; + + +public class ProcessorTests extends ESTestCase { + + private static List> processors; + + @BeforeClass + public static void init() throws Exception { + processors = NodeSubclassTests.subclassesOf(Processor.class); + } + + + public void testProcessorRegistration() throws Exception { + LinkedHashSet registered = Processors.getNamedWriteables().stream() + .map(e -> e.name) + .collect(toCollection(LinkedHashSet::new)); + + // discover available processors + int missing = processors.size() - registered.size(); + + + if (missing > 0) { + List notRegistered = new ArrayList<>(); + for (Class proc : processors) { + String procName = proc.getName(); + assertTrue(procName + " does NOT implement NamedWriteable", NamedWriteable.class.isAssignableFrom(proc)); + Field name = null; + String value = null; + try { + name = proc.getField("NAME"); + } catch (Exception ex) { + fail(procName + " does NOT provide a NAME field\n" + ex); + } + try { + value = name.get(proc).toString(); + } catch (Exception ex) { + fail(procName + " does NOT provide a static NAME field\n" + ex); + } + if (!registered.contains(value)) { + notRegistered.add(procName); + } + } + + fail(missing + " processor(s) not registered : " + notRegistered); + } else { + assertEquals("Detection failed: discovered more registered processors than classes", 0, missing); + } + } +} \ No newline at end of file diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessorTests.java similarity index 94% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessorTests.java index 59c9df1f899..3303072e500 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessorTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; @@ -11,7 +11,6 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; -import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InProcessor; import java.util.Arrays; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InTests.java similarity index 95% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InTests.java rename to 
x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InTests.java index 984fb833feb..c78014afbc4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.sql.expression.predicate; +package org.elasticsearch.xpack.sql.expression.predicate.operator.comparison; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Literal; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 3112827117a..68a0ea9e949 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Floor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Ascii; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Repeat; import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.sql.expression.predicate.In; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.expression.predicate.IsNotNull; import org.elasticsearch.xpack.sql.expression.predicate.Range; import org.elasticsearch.xpack.sql.expression.predicate.logical.And; @@ -345,7 +345,7 @@ public class OptimizerTests extends ESTestCase { public void testConstantFoldingIn_RightValueIsNull() { In in = new 
In(EMPTY, getFieldAttribute(), Arrays.asList(NULL, NULL)); Literal result= (Literal) new ConstantFolding().rule(in); - assertEquals(false, result.value()); + assertNull(result.value()); } public void testConstantFoldingIn_LeftValueIsNull() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 05f9c136515..fbe0ab2aa81 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -33,6 +33,7 @@ import org.junit.BeforeClass; import java.util.Map; import java.util.TimeZone; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.core.StringStartsWith.startsWith; public class QueryTranslatorTests extends ESTestCase { @@ -208,12 +209,11 @@ public class QueryTranslatorTests extends ESTestCase { QueryTranslation translation = QueryTranslator.toQuery(condition, false); assertNull(translation.aggFilter); assertTrue(translation.query instanceof ScriptQuery); - ScriptQuery sq = (ScriptQuery) translation.query; - assertEquals("InternalSqlScriptUtils.nullSafeFilter(" + - "InternalSqlScriptUtils.power(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1)==10 || " + - "InternalSqlScriptUtils.power(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1)==20)", - sq.script().toString()); - assertEquals("[{v=int}, {v=2}]", sq.script().params().toString()); + ScriptQuery sc = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(" + + "InternalSqlScriptUtils.power(InternalSqlScriptUtils.docValue(doc,params.v0),params.v1), params.v2))", + sc.script().toString()); + assertEquals("[{v=int}, {v=2}, {v=[10.0, 20.0]}]", sc.script().params().toString()); } public void 
testTranslateInExpression_HavingClause_Painless() { @@ -225,9 +225,10 @@ public class QueryTranslatorTests extends ESTestCase { QueryTranslation translation = QueryTranslator.toQuery(condition, true); assertNull(translation.query); AggFilter aggFilter = translation.aggFilter; - assertEquals("InternalSqlScriptUtils.nullSafeFilter(params.a0==10 || params.a0==20)", + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(params.a0, params.v0))", aggFilter.scriptTemplate().toString()); assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=MAX(int){a->")); + assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=[10, 20]}]")); } public void testTranslateInExpression_HavingClause_PainlessOneArg() { @@ -239,9 +240,10 @@ public class QueryTranslatorTests extends ESTestCase { QueryTranslation translation = QueryTranslator.toQuery(condition, true); assertNull(translation.query); AggFilter aggFilter = translation.aggFilter; - assertEquals("InternalSqlScriptUtils.nullSafeFilter(params.a0==10)", + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(params.a0, params.v0))", aggFilter.scriptTemplate().toString()); assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=MAX(int){a->")); + assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=[10]}]")); } @@ -254,8 +256,9 @@ public class QueryTranslatorTests extends ESTestCase { QueryTranslation translation = QueryTranslator.toQuery(condition, true); assertNull(translation.query); AggFilter aggFilter = translation.aggFilter; - assertEquals("InternalSqlScriptUtils.nullSafeFilter(params.a0==10 || params.a0==20 || params.a0==30)", + assertEquals("InternalSqlScriptUtils.nullSafeFilter(InternalSqlScriptUtils.in(params.a0, params.v0))", aggFilter.scriptTemplate().toString()); assertThat(aggFilter.scriptTemplate().params().toString(), startsWith("[{a=MAX(int){a->")); + 
assertThat(aggFilter.scriptTemplate().params().toString(), endsWith(", {v=[10, 20, 30]}]")); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java index 4a602246665..363254f414c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/SqlPluginTests.java @@ -31,7 +31,7 @@ public class SqlPluginTests extends ESTestCase { assertThat(plugin.getActions(), empty()); assertThat(plugin.getRestHandlers(Settings.EMPTY, mock(RestController.class), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, new SettingsFilter(Settings.EMPTY, Collections.emptyList()), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, new SettingsFilter(Collections.emptyList()), mock(IndexNameExpressionResolver.class), () -> mock(DiscoveryNodes.class)), empty()); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java index caacee0f4ba..9510b8d2213 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.tree; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.ESTestCase; @@ -24,7 +25,7 @@ import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipesTests; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor; import 
org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.expression.predicate.In; +import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InPipe; import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; @@ -585,7 +586,7 @@ public class NodeSubclassTests> extends ESTestCas /** * Find all subclasses of a particular class. */ - private static List> subclassesOf(Class clazz) throws IOException { + public static List> subclassesOf(Class clazz) throws IOException { @SuppressWarnings("unchecked") // The map is built this way List> lookup = (List>) subclassCache.get(clazz); if (lookup != null) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index b191646a9cd..7a04139430e 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -224,6 +224,12 @@ public class DataTypeConversionTests extends ESTestCase { assertNull(conversion.convert(10.0)); } + public void testConversionFromNull() { + Conversion conversion = DataTypeConversion.conversionFor(DataType.NULL, DataType.INTEGER); + assertNull(conversion.convert(null)); + assertNull(conversion.convert(10)); + } + public void testConversionToIdentity() { Conversion conversion = DataTypeConversion.conversionFor(DataType.INTEGER, DataType.INTEGER); assertNull(conversion.convert(null)); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.watcher.stats.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.watcher.stats.json index 40eda835a4b..86c982a8881 100644 
--- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.watcher.stats.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.watcher.stats.json @@ -8,14 +8,14 @@ "parts": { "metric": { "type" : "enum", - "options" : ["_all", "queued_watches", "pending_watches"], + "options" : ["_all", "queued_watches", "current_watches", "pending_watches"], "description" : "Controls what additional stat metrics should be include in the response" } }, "params": { "metric": { "type" : "enum", - "options" : ["_all", "queued_watches", "pending_watches"], + "options" : ["_all", "queued_watches", "current_watches", "pending_watches"], "description" : "Controls what additional stat metrics should be include in the response" }, "emit_stacktraces": { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml index 9844dea9135..5a90af37252 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml @@ -12,3 +12,55 @@ emit_stacktraces: "true" - match: { "manually_stopped": false } - match: { "stats.0.watcher_state": "started" } + +--- +"Test watcher stats current watches": + - skip: + version: " - 6.99.99" + reason: metrics were fixed in 7.0.0 + + - do: + xpack.watcher.stats: + metric: "current_watches" + + - is_false: stats.0.queued_watches + - is_true: stats.0.current_watches + +--- +"Test watcher stats queued watches": + - skip: + version: " - 6.99.99" + reason: metrics were fixed in 7.0.0 + + - do: + xpack.watcher.stats: + metric: "queued_watches" + + - is_false: stats.0.current_watches + - is_true: stats.0.queued_watches + +--- +"Test watcher stats queued watches using pending_watches": + - skip: + version: " - 6.99.99" + reason: metrics were fixed in 7.0.0 + features: warnings + + - do: + warnings: + - 'The pending_watches parameter is 
deprecated, use queued_watches instead' + + xpack.watcher.stats: + metric: "pending_watches" + + - is_false: stats.0.current_watches + - is_true: stats.0.queued_watches + +--- +"Test watcher stats all watches": + - do: + xpack.watcher.stats: + metric: "_all" + + - is_true: stats.0.current_watches + - is_true: stats.0.queued_watches diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java index d91cc2a6f01..2b46a661e17 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java @@ -11,7 +11,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.script.Script; @@ -43,17 +42,16 @@ public class IndexUpgradeCheck extends AbstractComponent { * Creates a new upgrade check * * @param name - the name of the check - * @param settings - system settings * @param actionRequired - return true if they can work with the index with specified name * @param client - client * @param clusterService - cluster service * @param types - a list of types that the reindexing should be limited to * @param updateScript - the upgrade script that should be used during reindexing */ - public IndexUpgradeCheck(String name, Settings settings, + public IndexUpgradeCheck(String name, Function actionRequired, Client client, ClusterService clusterService, String[] types, Script updateScript) { - this(name, settings, actionRequired, client, 
clusterService, types, updateScript, + this(name, actionRequired, client, clusterService, types, updateScript, listener -> listener.onResponse(null), (t, listener) -> listener.onResponse(TransportResponse.Empty.INSTANCE)); } @@ -61,7 +59,6 @@ public class IndexUpgradeCheck extends AbstractComponent { * Creates a new upgrade check * * @param name - the name of the check - * @param settings - system settings * @param actionRequired - return true if they can work with the index with specified name * @param client - client * @param clusterService - cluster service @@ -70,12 +67,11 @@ public class IndexUpgradeCheck extends AbstractComponent { * @param preUpgrade - action that should be performed before upgrade * @param postUpgrade - action that should be performed after upgrade */ - public IndexUpgradeCheck(String name, Settings settings, + public IndexUpgradeCheck(String name, Function actionRequired, Client client, ClusterService clusterService, String[] types, Script updateScript, Consumer> preUpgrade, BiConsumer> postUpgrade) { - super(settings); this.name = name; this.actionRequired = actionRequired; this.reindexer = new InternalIndexReindexer<>(client, clusterService, IndexUpgradeCheckVersion.UPRADE_VERSION, updateScript, diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java index ad0ebd6815f..1f51ff391ce 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java @@ -13,7 +13,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; @@ -29,10 +28,9 @@ public class IndexUpgradeService extends AbstractComponent { private final IndexNameExpressionResolver indexNameExpressionResolver; - public IndexUpgradeService(Settings settings, List upgradeChecks) { - super(settings); + public IndexUpgradeService(List upgradeChecks) { this.upgradeChecks = upgradeChecks; - this.indexNameExpressionResolver = new IndexNameExpressionResolver(settings); + this.indexNameExpressionResolver = new IndexNameExpressionResolver(); } /** diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java index e454ac4a014..985baeaf9ab 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java @@ -46,11 +46,9 @@ public class Upgrade extends Plugin implements ActionPlugin { public static final Version UPGRADE_INTRODUCED = Version.CURRENT.minimumCompatibilityVersion(); - private final Settings settings; private final List> upgradeCheckFactories; - public Upgrade(Settings settings) { - this.settings = settings; + public Upgrade() { this.upgradeCheckFactories = new ArrayList<>(); } @@ -63,7 +61,7 @@ public class Upgrade extends Plugin implements ActionPlugin { for (BiFunction checkFactory : upgradeCheckFactories) { upgradeChecks.add(checkFactory.apply(client, clusterService)); } - return Collections.singletonList(new IndexUpgradeService(settings, Collections.unmodifiableList(upgradeChecks))); + return Collections.singletonList(new IndexUpgradeService(Collections.unmodifiableList(upgradeChecks))); } @Override diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java 
b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index 56a17674b49..d5e2fe9b74b 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -88,7 +87,7 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { AtomicBoolean postUpgradeIsCalled = new AtomicBoolean(); IndexUpgradeCheck check = new IndexUpgradeCheck( - "test", Settings.EMPTY, + "test", indexMetaData -> { if (indexMetaData.getIndex().getName().equals(testIndex)) { return UpgradeActionRequired.UPGRADE; @@ -116,7 +115,7 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { ); ensureYellow(testIndex); - IndexUpgradeService service = new IndexUpgradeService(Settings.EMPTY, Collections.singletonList(check)); + IndexUpgradeService service = new IndexUpgradeService(Collections.singletonList(check)); PlainActionFuture future = PlainActionFuture.newFuture(); service.upgrade(new TaskId("abc", 123), testIndex, clusterService().state(), future); diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java index f980450c07f..961b86a53cd 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java +++ 
b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java @@ -27,7 +27,7 @@ import static org.hamcrest.core.IsEqual.equalTo; public class IndexUpgradeServiceTests extends ESTestCase { - private IndexUpgradeCheck upgradeBarCheck = new IndexUpgradeCheck("upgrade_bar", Settings.EMPTY, + private IndexUpgradeCheck upgradeBarCheck = new IndexUpgradeCheck("upgrade_bar", (Function) indexMetaData -> { if ("bar".equals(indexMetaData.getSettings().get("test.setting"))) { return UpgradeActionRequired.UPGRADE; @@ -36,7 +36,7 @@ public class IndexUpgradeServiceTests extends ESTestCase { } }, null, null, null, null); - private IndexUpgradeCheck reindexFooCheck = new IndexUpgradeCheck("reindex_foo", Settings.EMPTY, + private IndexUpgradeCheck reindexFooCheck = new IndexUpgradeCheck("reindex_foo", (Function) indexMetaData -> { if ("foo".equals(indexMetaData.getSettings().get("test.setting"))) { return UpgradeActionRequired.REINDEX; @@ -45,10 +45,10 @@ public class IndexUpgradeServiceTests extends ESTestCase { } }, null, null, null, null); - private IndexUpgradeCheck everythingIsFineCheck = new IndexUpgradeCheck("everything_is_fine", Settings.EMPTY, + private IndexUpgradeCheck everythingIsFineCheck = new IndexUpgradeCheck("everything_is_fine", indexMetaData -> UpgradeActionRequired.UP_TO_DATE, null, null, null, null); - private IndexUpgradeCheck unreachableCheck = new IndexUpgradeCheck("unreachable", Settings.EMPTY, + private IndexUpgradeCheck unreachableCheck = new IndexUpgradeCheck("unreachable", (Function) indexMetaData -> { fail("Unreachable check is called"); return null; @@ -57,14 +57,14 @@ public class IndexUpgradeServiceTests extends ESTestCase { public void testIndexUpgradeServiceMultipleCheck() throws Exception { IndexUpgradeService service; if (randomBoolean()) { - service = new IndexUpgradeService(Settings.EMPTY, Arrays.asList( + service = new IndexUpgradeService(Arrays.asList( upgradeBarCheck, reindexFooCheck, 
everythingIsFineCheck, unreachableCheck // This one should never be called )); } else { - service = new IndexUpgradeService(Settings.EMPTY, Arrays.asList( + service = new IndexUpgradeService(Arrays.asList( reindexFooCheck, upgradeBarCheck, everythingIsFineCheck, @@ -93,7 +93,7 @@ public class IndexUpgradeServiceTests extends ESTestCase { public void testNoMatchingChecks() throws Exception { - IndexUpgradeService service = new IndexUpgradeService(Settings.EMPTY, Arrays.asList( + IndexUpgradeService service = new IndexUpgradeService(Arrays.asList( upgradeBarCheck, reindexFooCheck )); @@ -113,7 +113,7 @@ public class IndexUpgradeServiceTests extends ESTestCase { } public void testEarlierChecksWin() throws Exception { - IndexUpgradeService service = new IndexUpgradeService(Settings.EMPTY, Arrays.asList( + IndexUpgradeService service = new IndexUpgradeService(Arrays.asList( everythingIsFineCheck, upgradeBarCheck, reindexFooCheck @@ -132,7 +132,7 @@ public class IndexUpgradeServiceTests extends ESTestCase { } public void testGenericTest() throws Exception { - IndexUpgradeService service = new IndexUpgradeService(Settings.EMPTY, Arrays.asList( + IndexUpgradeService service = new IndexUpgradeService(Arrays.asList( upgradeBarCheck, reindexFooCheck )); diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java index 10dc126f731..1b9ad0a79fc 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java @@ -97,7 +97,7 @@ public class IndexUpgradeTasksIT extends ESIntegTestCase { public MockUpgradePlugin(Settings settings) { this.settings = settings; - this.upgrade = new Upgrade(settings); + this.upgrade = new Upgrade(); LogManager.getLogger(IndexUpgradeTasksIT.class).info("MockUpgradePlugin is 
created"); } @@ -121,8 +121,8 @@ public class IndexUpgradeTasksIT extends ESIntegTestCase { ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { - return Arrays.asList(new IndexUpgradeService(settings, Collections.singletonList( - new IndexUpgradeCheck("test", settings, + return Arrays.asList(new IndexUpgradeService(Collections.singletonList( + new IndexUpgradeCheck("test", new Function() { @Override public UpgradeActionRequired apply(IndexMetaData indexMetaData) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index f87d9454f2d..ce9db36eef0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -272,7 +272,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa throw new UncheckedIOException(e); } - new WatcherIndexTemplateRegistry(settings, clusterService, threadPool, client); + new WatcherIndexTemplateRegistry(clusterService, threadPool, client); // http client httpClient = new HttpClient(settings, getSslService(), cryptoService); @@ -290,7 +290,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa reloadableServices.add(slackService); reloadableServices.add(pagerDutyService); - TextTemplateEngine templateEngine = new TextTemplateEngine(settings, scriptService); + TextTemplateEngine templateEngine = new TextTemplateEngine(scriptService); Map emailAttachmentParsers = new HashMap<>(); emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, templateEngine)); emailAttachmentParsers.put(DataAttachmentParser.TYPE, new 
DataAttachmentParser()); @@ -376,7 +376,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa .setConcurrentRequests(SETTING_BULK_CONCURRENT_REQUESTS.get(settings)) .build(); - HistoryStore historyStore = new HistoryStore(settings, bulkProcessor); + HistoryStore historyStore = new HistoryStore(bulkProcessor); // schedulers final Set scheduleParsers = new HashSet<>(); @@ -395,15 +395,15 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa final Set triggerEngines = new HashSet<>(); triggerEngines.add(manualTriggerEngine); triggerEngines.add(configuredTriggerEngine); - final TriggerService triggerService = new TriggerService(settings, triggerEngines); + final TriggerService triggerService = new TriggerService(triggerEngines); - final TriggeredWatch.Parser triggeredWatchParser = new TriggeredWatch.Parser(settings, triggerService); + final TriggeredWatch.Parser triggeredWatchParser = new TriggeredWatch.Parser(triggerService); final TriggeredWatchStore triggeredWatchStore = new TriggeredWatchStore(settings, client, triggeredWatchParser, bulkProcessor); final WatcherSearchTemplateService watcherSearchTemplateService = - new WatcherSearchTemplateService(settings, scriptService, xContentRegistry); + new WatcherSearchTemplateService(scriptService, xContentRegistry); final WatchExecutor watchExecutor = getWatchExecutor(threadPool); - final WatchParser watchParser = new WatchParser(settings, triggerService, registry, inputRegistry, cryptoService, getClock()); + final WatchParser watchParser = new WatchParser(triggerService, registry, inputRegistry, cryptoService, getClock()); final ExecutionService executionService = new ExecutionService(settings, historyStore, triggeredWatchStore, watchExecutor, getClock(), watchParser, clusterService, client, threadPool.generic()); @@ -415,9 +415,9 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa watchParser, client); final 
WatcherLifeCycleService watcherLifeCycleService = - new WatcherLifeCycleService(settings, clusterService, watcherService); + new WatcherLifeCycleService(clusterService, watcherService); - listener = new WatcherIndexingListener(settings, watchParser, getClock(), triggerService); + listener = new WatcherIndexingListener(watchParser, getClock(), triggerService); clusterService.addListener(listener); return Arrays.asList(registry, inputRegistry, historyStore, triggerService, triggeredWatchParser, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index 086528054bc..156a6e5de28 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.engine.Engine; @@ -65,8 +64,7 @@ final class WatcherIndexingListener extends AbstractComponent implements Indexin private final TriggerService triggerService; private volatile Configuration configuration = INACTIVE; - WatcherIndexingListener(Settings settings, WatchParser parser, Clock clock, TriggerService triggerService) { - super(settings); + WatcherIndexingListener(WatchParser parser, Clock clock, TriggerService triggerService) { this.parser = parser; this.clock = clock; this.triggerService = triggerService; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java index a67101e3587..1bf9c4ded60 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.LifecycleListener; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.xpack.core.watcher.WatcherMetaData; @@ -42,8 +41,7 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste private volatile boolean shutDown = false; // indicates that the node has been shutdown and we should never start watcher after this. 
private volatile WatcherService watcherService; - WatcherLifeCycleService(Settings settings, ClusterService clusterService, WatcherService watcherService) { - super(settings); + WatcherLifeCycleService(ClusterService clusterService, WatcherService watcherService) { this.watcherService = watcherService; clusterService.addListener(this); // Close if the indices service is being stopped, so we don't run into search failures (locally) that will diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 75fd13915de..f26ab5a14fb 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -83,7 +83,6 @@ public class WatcherService extends AbstractComponent { WatcherService(Settings settings, TriggerService triggerService, TriggeredWatchStore triggeredWatchStore, ExecutionService executionService, WatchParser parser, Client client, ExecutorService executor) { - super(settings); this.triggerService = triggerService; this.triggeredWatchStore = triggeredWatchStore; this.executionService = executionService; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java index b4f248190d8..2a327851558 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java @@ -75,7 +75,6 @@ public class HttpClient extends AbstractComponent implements Closeable { private final CryptoService cryptoService; public HttpClient(Settings settings, SSLService sslService, CryptoService cryptoService) { - super(settings); 
this.defaultConnectionTimeout = HttpSettings.CONNECTION_TIMEOUT.get(settings); this.defaultReadTimeout = HttpSettings.READ_TIMEOUT.get(settings); this.maxResponseSize = HttpSettings.MAX_HTTP_RESPONSE_SIZE.get(settings); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java index b7198f90c59..2f2d3d7b9f3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.watcher.common.text; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -17,12 +15,11 @@ import org.elasticsearch.xpack.watcher.Watcher; import java.util.HashMap; import java.util.Map; -public class TextTemplateEngine extends AbstractComponent { +public class TextTemplateEngine { private final ScriptService service; - public TextTemplateEngine(Settings settings, ScriptService service) { - super(settings); + public TextTemplateEngine(ScriptService service) { this.service = service; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 7a0b7f14bca..2beafa9be28 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -101,7 +101,6 @@ public class ExecutionService extends AbstractComponent { public 
ExecutionService(Settings settings, HistoryStore historyStore, TriggeredWatchStore triggeredWatchStore, WatchExecutor executor, Clock clock, WatchParser parser, ClusterService clusterService, Client client, ExecutorService genericExecutor) { - super(settings); this.historyStore = historyStore; this.triggeredWatchStore = triggeredWatchStore; this.executor = executor; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java index 51998a14bd7..91475f49971 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatch.java @@ -8,8 +8,6 @@ package org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -48,12 +46,11 @@ public class TriggeredWatch implements ToXContentObject { return builder; } - public static class Parser extends AbstractComponent { + public static class Parser { private final TriggerService triggerService; - public Parser(Settings settings, TriggerService triggerService) { - super(settings); + public Parser(TriggerService triggerService) { this.triggerService = triggerService; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 
9a4b555d633..049a7ff200e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -60,7 +60,6 @@ public class TriggeredWatchStore extends AbstractComponent { private final BulkProcessor bulkProcessor; public TriggeredWatchStore(Settings settings, Client client, TriggeredWatch.Parser triggeredWatchParser, BulkProcessor bulkProcessor) { - super(settings); this.scrollSize = settings.getAsInt("xpack.watcher.execution.scroll.size", 1000); this.client = ClientHelper.clientWithOrigin(client, WATCHER_ORIGIN); this.scrollTimeout = settings.getAsTime("xpack.watcher.execution.scroll.timeout", TimeValue.timeValueMinutes(5)); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java index 723568f8ba7..a20eef6b489 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; @@ -32,8 +31,7 @@ public class HistoryStore extends AbstractComponent { private final BulkProcessor bulkProcessor; - public HistoryStore(Settings settings, BulkProcessor bulkProcessor) { - super(settings); + public HistoryStore(BulkProcessor bulkProcessor) { this.bulkProcessor = 
bulkProcessor; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java index e6c78f6ac02..9ca790c3df5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/SearchInputFactory.java @@ -26,7 +26,7 @@ public class SearchInputFactory extends InputFactory extends AbstractComponent { private Map accounts; private Account defaultAccount; - public NotificationService(Settings settings, String type, + public NotificationService(String type, ClusterSettings clusterSettings, List> pluginSettings) { - this(settings, type); + this(type); clusterSettings.addSettingsUpdateConsumer(this::reload, pluginSettings); } // Used for testing only - NotificationService(Settings settings, String type) { - super(settings); + NotificationService(String type) { this.type = type; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java index e45ed55cee3..70922e57bd0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java @@ -104,7 +104,7 @@ public class EmailService extends NotificationService { private final CryptoService cryptoService; public EmailService(Settings settings, @Nullable CryptoService cryptoService, ClusterSettings clusterSettings) { - super(settings, "email", clusterSettings, EmailService.getSettings()); + super("email", clusterSettings, EmailService.getSettings()); this.cryptoService = cryptoService; // ensure logging of 
setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java index 2f21c2299a9..58840aec977 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java @@ -65,7 +65,7 @@ public class HipChatService extends NotificationService { private HipChatServer defaultServer; public HipChatService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) { - super(settings, "hipchat", clusterSettings, HipChatService.getSettings()); + super("hipchat", clusterSettings, HipChatService.getSettings()); this.httpClient = httpClient; // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java index 49c05f36b24..d7b7fe2003b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java @@ -62,7 +62,7 @@ public class JiraService extends NotificationService { private final HttpClient httpClient; public JiraService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) { - super(settings, "jira", clusterSettings, JiraService.getSettings()); + super("jira", clusterSettings, JiraService.getSettings()); this.httpClient = httpClient; // ensure logging of setting changes 
clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java index c10bcf4782f..6834fcd4e2e 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java @@ -40,7 +40,7 @@ public class PagerDutyService extends NotificationService { private final HttpClient httpClient; public PagerDutyService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) { - super(settings, "pagerduty", clusterSettings, PagerDutyService.getSettings()); + super("pagerduty", clusterSettings, PagerDutyService.getSettings()); this.httpClient = httpClient; clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SERVICE_API_KEY, (s, o) -> {}, (s, o) -> {}); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java index 2a38e08d599..888da55430a 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java @@ -40,7 +40,7 @@ public class SlackService extends NotificationService { private final HttpClient httpClient; public SlackService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) { - super(settings, "slack", clusterSettings, SlackService.getSettings()); + super("slack", clusterSettings, SlackService.getSettings()); 
this.httpClient = httpClient; clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_URL, (s, o) -> {}, (s, o) -> {}); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java index 90c756c1323..fad5b9cf93f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/rest/action/RestWatcherStatsAction.java @@ -5,6 +5,9 @@ */ package org.elasticsearch.xpack.watcher.rest.action; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; @@ -21,6 +24,9 @@ import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestWatcherStatsAction extends WatcherRestHandler { + private static final Logger logger = LogManager.getLogger(RestWatcherStatsAction.class); + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + public RestWatcherStatsAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(GET, URI_BASE + "/stats", this); @@ -41,8 +47,12 @@ public class RestWatcherStatsAction extends WatcherRestHandler { request.includeCurrentWatches(true); request.includeQueuedWatches(true); } else { - request.includeCurrentWatches(metrics.contains("queued_watches")); - request.includeQueuedWatches(metrics.contains("pending_watches")); + request.includeCurrentWatches(metrics.contains("current_watches")); + request.includeQueuedWatches(metrics.contains("queued_watches") || 
metrics.contains("pending_watches")); + } + + if (metrics.contains("pending_watches")) { + deprecationLogger.deprecated("The pending_watches parameter is deprecated, use queued_watches instead"); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java index 36ec856bedd..6d0a02062ee 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistry.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.GatewayService; @@ -51,8 +50,7 @@ public class WatcherIndexTemplateRegistry extends AbstractComponent implements C private final TemplateConfig[] indexTemplates; private final ConcurrentMap templateCreationsInProgress = new ConcurrentHashMap<>(); - public WatcherIndexTemplateRegistry(Settings settings, ClusterService clusterService, ThreadPool threadPool, Client client) { - super(settings); + public WatcherIndexTemplateRegistry(ClusterService clusterService, ThreadPool threadPool, Client client) { this.client = client; this.threadPool = threadPool; this.indexTemplates = TEMPLATE_CONFIGS; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java index 
2208aab428a..d8451fbc7b3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.watcher.support.search; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; @@ -36,8 +35,7 @@ public class WatcherSearchTemplateService extends AbstractComponent { private final ScriptService scriptService; private final NamedXContentRegistry xContentRegistry; - public WatcherSearchTemplateService(Settings settings, ScriptService scriptService, NamedXContentRegistry xContentRegistry) { - super(settings); + public WatcherSearchTemplateService(ScriptService scriptService, NamedXContentRegistry xContentRegistry) { this.scriptService = scriptService; this.xContentRegistry = xContentRegistry; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java index 86293668872..72b91d07562 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/SearchTransformFactory.java @@ -27,7 +27,7 @@ public class SearchTransformFactory extends TransformFactory engines; private final Map perWatchStats = new HashMap<>(); - public TriggerService(Settings settings, Set engines) { - 
super(settings); + public TriggerService(Set engines) { Map builder = new HashMap<>(); for (TriggerEngine engine : engines) { builder.put(engine.type(), engine); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEngine.java index dfee54c391a..c53162bd7d9 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/ScheduleTriggerEngine.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.trigger.schedule; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; @@ -33,8 +32,7 @@ public abstract class ScheduleTriggerEngine extends AbstractComponent implements protected final ScheduleRegistry scheduleRegistry; protected final Clock clock; - public ScheduleTriggerEngine(Settings settings, ScheduleRegistry scheduleRegistry, Clock clock) { - super(settings); + public ScheduleTriggerEngine(ScheduleRegistry scheduleRegistry, Clock clock) { this.scheduleRegistry = scheduleRegistry; this.clock = clock; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java index bd0204766af..2fe74be2b0c 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java +++ 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java @@ -41,7 +41,7 @@ public class TickerScheduleTriggerEngine extends ScheduleTriggerEngine { private Ticker ticker; public TickerScheduleTriggerEngine(Settings settings, ScheduleRegistry scheduleRegistry, Clock clock) { - super(settings, scheduleRegistry, clock); + super(scheduleRegistry, clock); this.tickInterval = TICKER_INTERVAL_SETTING.get(settings); this.schedules = new ConcurrentHashMap<>(); this.ticker = new Ticker(Node.NODE_DATA_SETTING.get(settings)); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java index fe2f80d07f4..6c088029937 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/watch/WatchParser.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -60,9 +59,8 @@ public class WatchParser extends AbstractComponent { private final ExecutableCondition defaultCondition; private final List defaultActions; - public WatchParser(Settings settings, TriggerService triggerService, ActionRegistry actionRegistry, InputRegistry inputRegistry, + public WatchParser(TriggerService triggerService, ActionRegistry actionRegistry, InputRegistry inputRegistry, @Nullable CryptoService cryptoService, Clock clock) { - super(settings); this.triggerService = triggerService; 
this.actionRegistry = actionRegistry; this.inputRegistry = inputRegistry; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java index 752f97b6b1a..f351ed2e154 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java @@ -86,7 +86,7 @@ public class WatcherIndexingListenerTests extends ESTestCase { @Before public void setup() throws Exception { clock.freeze(); - listener = new WatcherIndexingListener(Settings.EMPTY, parser, clock, triggerService); + listener = new WatcherIndexingListener(parser, clock, triggerService); Map map = new HashMap<>(); map.put(shardId, new ShardAllocationConfiguration(0, 1, Collections.singletonList("foo"))); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java index 384338af5a2..467966e96fd 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java @@ -78,7 +78,7 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { }; doAnswer(answer).when(clusterService).submitStateUpdateTask(anyString(), any(ClusterStateUpdateTask.class)); watcherService = mock(WatcherService.class); - lifeCycleService = new WatcherLifeCycleService(Settings.EMPTY, clusterService, watcherService); + lifeCycleService = new WatcherLifeCycleService(clusterService, watcherService); } public void testNoRestartWithoutAllocationIdsConfigured() { diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index 73ad0456524..0f670ea4cde 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -223,7 +223,7 @@ public class WatcherServiceTests extends ESTestCase { String engineType = "foo"; TriggerEngine triggerEngine = mock(TriggerEngine.class); when(triggerEngine.type()).thenReturn(engineType); - TriggerService triggerService = new TriggerService(Settings.EMPTY, Collections.singleton(triggerEngine)); + TriggerService triggerService = new TriggerService(Collections.singleton(triggerEngine)); Trigger trigger = mock(Trigger.class); when(trigger.type()).thenReturn(engineType); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java index aa03bf4545e..c4604d8e2a1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/ExecutableJiraActionTests.java @@ -300,7 +300,7 @@ public class ExecutableJiraActionTests extends ESTestCase { class UpperCaseTextTemplateEngine extends TextTemplateEngine { UpperCaseTextTemplateEngine() { - super(Settings.EMPTY, mock(ScriptService.class)); + super(mock(ScriptService.class)); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java index 22a6ced9e7e..a0d09e39c02 100644 --- 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/jira/JiraActionTests.java @@ -293,7 +293,7 @@ public class JiraActionTests extends ESTestCase { private final Map model; ModelTextTemplateEngine(Map model) { - super(Settings.EMPTY, mock(ScriptService.class)); + super(mock(ScriptService.class)); this.model = model; } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java index 002d833c209..ecc071d5981 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.watcher.common.text; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; @@ -47,7 +46,7 @@ public class TextTemplateTests extends ESTestCase { @Before public void init() throws Exception { service = mock(ScriptService.class); - engine = new TextTemplateEngine(Settings.EMPTY, service); + engine = new TextTemplateEngine(service); } public void testRender() throws Exception { @@ -131,7 +130,7 @@ public class TextTemplateTests extends ESTestCase { private void assertScriptServiceInvoked(final String input) { ScriptService scriptService = mock(ScriptService.class); - TextTemplateEngine e = new TextTemplateEngine(Settings.EMPTY, scriptService); + TextTemplateEngine e = new TextTemplateEngine(scriptService); TemplateScript.Factory compiledTemplate = templateParams -> new 
TemplateScript(templateParams) { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 4012c8d24b5..428ec96df97 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -398,10 +398,10 @@ public class TriggeredWatchStoreTests extends ESTestCase { triggeredWatch.toXContent(jsonBuilder, ToXContent.EMPTY_PARAMS); ScheduleRegistry scheduleRegistry = new ScheduleRegistry(Collections.singleton(new CronSchedule.Parser())); - TriggerEngine triggerEngine = new WatchTests.ParseOnlyScheduleTriggerEngine(Settings.EMPTY, scheduleRegistry, new ClockMock()); - TriggerService triggerService = new TriggerService(Settings.EMPTY, singleton(triggerEngine)); + TriggerEngine triggerEngine = new WatchTests.ParseOnlyScheduleTriggerEngine(scheduleRegistry, new ClockMock()); + TriggerService triggerService = new TriggerService(singleton(triggerEngine)); - TriggeredWatch.Parser parser = new TriggeredWatch.Parser(Settings.EMPTY, triggerService); + TriggeredWatch.Parser parser = new TriggeredWatch.Parser(triggerService); TriggeredWatch parsedTriggeredWatch = parser.parse(triggeredWatch.id().value(), 0, BytesReference.bytes(jsonBuilder)); XContentBuilder jsonBuilder2 = XContentFactory.jsonBuilder(); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java index 19bf1ba5a1f..2ea364de18b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java @@ -71,7 +71,7 @@ public class HistoryStoreTests extends ESTestCase { when(threadPool.getThreadContext()).thenReturn(new ThreadContext(settings)); BulkProcessor.Listener listener = mock(BulkProcessor.Listener.class); BulkProcessor bulkProcessor = BulkProcessor.builder(client, listener).setConcurrentRequests(0).setBulkActions(1).build(); - historyStore = new HistoryStore(settings, bulkProcessor); + historyStore = new HistoryStore(bulkProcessor); } public void testPut() throws Exception { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java index cb86913678a..ddf45de8163 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java @@ -81,7 +81,7 @@ public class NotificationServiceTests extends ESTestCase { private static class TestNotificationService extends NotificationService { TestNotificationService(Settings settings) { - super(settings, "test"); + super("test"); reload(settings); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java index 97907bbef8a..d46524b93e5 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/attachment/ReportingAttachmentParserTests.java @@ -352,7 +352,7 @@ public class 
ReportingAttachmentParserTests extends ESTestCase { .thenReturn(new HttpResponse(503)) .thenReturn(new HttpResponse(200, randomAlphaOfLength(10))); - TextTemplateEngine replaceHttpWithHttpsTemplateEngine = new TextTemplateEngine(Settings.EMPTY, null) { + TextTemplateEngine replaceHttpWithHttpsTemplateEngine = new TextTemplateEngine(null) { @Override public String render(TextTemplate textTemplate, Map model) { return textTemplate.getTemplate().replaceAll("REPLACEME", "REPLACED"); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java index 3201a69b486..e93a86b93eb 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherIndexTemplateRegistryTests.java @@ -72,7 +72,7 @@ public class WatcherIndexTemplateRegistryTests extends ESTestCase { }).when(indicesAdminClient).putTemplate(any(PutIndexTemplateRequest.class), any(ActionListener.class)); ClusterService clusterService = mock(ClusterService.class); - registry = new WatcherIndexTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client); + registry = new WatcherIndexTemplateRegistry(clusterService, threadPool, client); } public void testThatNonExistingTemplatesAreAddedImmediately() { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java index eeefa20c3a1..a8db182bda0 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/MockTextTemplateEngine.java @@ -5,7 +5,6 @@ */ 
package org.elasticsearch.xpack.watcher.test; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; @@ -13,7 +12,7 @@ import java.util.Map; public class MockTextTemplateEngine extends TextTemplateEngine { public MockTextTemplateEngine() { - super(Settings.EMPTY, null); + super(null); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java index 99cf45e583d..23f7e6e7c79 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/TimeWarpedWatcher.java @@ -59,7 +59,7 @@ public class TimeWarpedWatcher extends LocalStateCompositeXPackPlugin { @Override protected TriggerEngine getTriggerEngine(Clock clock, ScheduleRegistry scheduleRegistry){ - return new ScheduleTriggerEngineMock(settings, scheduleRegistry, clock); + return new ScheduleTriggerEngineMock(scheduleRegistry, clock); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java index 80e802cf817..8a7eb841576 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java @@ -212,7 +212,7 @@ public class WatcherExecutorServiceBenchmark { @Override protected TriggerEngine getTriggerEngine(Clock clock, ScheduleRegistry scheduleRegistry) { - return new ScheduleTriggerEngineMock(settings, scheduleRegistry, clock); + 
return new ScheduleTriggerEngineMock(scheduleRegistry, clock); } } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index 5c0562c0a00..8714d894151 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -189,6 +189,6 @@ public class SearchInputTests extends ESTestCase { private WatcherSearchTemplateService watcherSearchTemplateService() { SearchModule module = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); - return new WatcherSearchTemplateService(Settings.EMPTY, scriptService, new NamedXContentRegistry(module.getNamedXContents())); + return new WatcherSearchTemplateService(scriptService, new NamedXContentRegistry(module.getNamedXContents())); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java index 4f0c78b1e0d..9ed7d1a3091 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.watcher.trigger; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; @@ -34,8 +33,8 @@ public class ScheduleTriggerEngineMock extends ScheduleTriggerEngine { private final 
ConcurrentMap watches = new ConcurrentHashMap<>(); - public ScheduleTriggerEngineMock(Settings settings, ScheduleRegistry scheduleRegistry, Clock clock) { - super(settings, scheduleRegistry, clock); + public ScheduleTriggerEngineMock(ScheduleRegistry scheduleRegistry, Clock clock) { + super(scheduleRegistry, clock); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java index f0d3e88b127..6372823d36d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/TriggerServiceTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.trigger; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.core.watcher.actions.ExecutableAction; @@ -39,7 +38,7 @@ public class TriggerServiceTests extends ESTestCase { public void setupTriggerService() { TriggerEngine triggerEngine = mock(TriggerEngine.class); when(triggerEngine.type()).thenReturn(ENGINE_TYPE); - service = new TriggerService(Settings.EMPTY, Collections.singleton(triggerEngine)); + service = new TriggerService(Collections.singleton(triggerEngine)); // simple watch, input and simple action watch1 = createWatch("1"); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index 91b5378a454..daf1f18f3bd 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -184,8 +184,8 @@ public class 
WatchTests extends ESTestCase { Schedule schedule = randomSchedule(); Trigger trigger = new ScheduleTrigger(schedule); ScheduleRegistry scheduleRegistry = registry(schedule); - TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(Settings.EMPTY, scheduleRegistry, clock); - TriggerService triggerService = new TriggerService(Settings.EMPTY, singleton(triggerEngine)); + TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, clock); + TriggerService triggerService = new TriggerService(singleton(triggerEngine)); ExecutableInput input = randomInput(); InputRegistry inputRegistry = registry(input.type()); @@ -212,7 +212,7 @@ public class WatchTests extends ESTestCase { BytesReference bytes = BytesReference.bytes(jsonBuilder().value(watch)); logger.info("{}", bytes.utf8ToString()); - WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, clock); + WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, clock); Watch parsedWatch = watchParser.parse("_name", includeStatus, bytes, XContentType.JSON); @@ -233,7 +233,7 @@ public class WatchTests extends ESTestCase { InputRegistry inputRegistry = mock(InputRegistry.class); ActionRegistry actionRegistry = mock(ActionRegistry.class); // a fake trigger service that advances past the trigger end object, which cannot be done with mocking - TriggerService triggerService = new TriggerService(Settings.EMPTY, Collections.emptySet()) { + TriggerService triggerService = new TriggerService(Collections.emptySet()) { @Override public Trigger parseTrigger(String jobName, XContentParser parser) throws IOException { while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -254,7 +254,7 @@ public class WatchTests extends ESTestCase { } WatchStatus watchStatus = new WatchStatus(new DateTime(clock.millis()), unmodifiableMap(actionsStatuses)); - WatchParser watchParser = new WatchParser(settings, 
triggerService, actionRegistry, inputRegistry, null, clock); + WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, clock); XContentBuilder builder = jsonBuilder().startObject().startObject("trigger").endObject().field("status", watchStatus).endObject(); Watch watch = watchParser.parse("foo", true, BytesReference.bytes(builder), XContentType.JSON); assertThat(watch.status().state().getTimestamp().getMillis(), is(clock.millis())); @@ -266,8 +266,8 @@ public class WatchTests extends ESTestCase { public void testParserBadActions() throws Exception { ClockMock clock = ClockMock.frozen(); ScheduleRegistry scheduleRegistry = registry(randomSchedule()); - TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(Settings.EMPTY, scheduleRegistry, clock); - TriggerService triggerService = new TriggerService(Settings.EMPTY, singleton(triggerEngine)); + TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, clock); + TriggerService triggerService = new TriggerService(singleton(triggerEngine)); ConditionRegistry conditionRegistry = conditionRegistry(); ExecutableInput input = randomInput(); InputRegistry inputRegistry = registry(input.type()); @@ -282,7 +282,7 @@ public class WatchTests extends ESTestCase { .startObject() .startArray("actions").endArray() .endObject(); - WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, clock); + WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, clock); try { watchParser.parse("failure", false, BytesReference.bytes(jsonBuilder), XContentType.JSON); fail("This watch should fail to parse as actions is an array"); @@ -294,8 +294,8 @@ public class WatchTests extends ESTestCase { public void testParserDefaults() throws Exception { Schedule schedule = randomSchedule(); ScheduleRegistry scheduleRegistry = registry(schedule); - TriggerEngine triggerEngine = new 
ParseOnlyScheduleTriggerEngine(Settings.EMPTY, scheduleRegistry, Clock.systemUTC()); - TriggerService triggerService = new TriggerService(Settings.EMPTY, singleton(triggerEngine)); + TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, Clock.systemUTC()); + TriggerService triggerService = new TriggerService(singleton(triggerEngine)); ConditionRegistry conditionRegistry = conditionRegistry(); InputRegistry inputRegistry = registry(new ExecutableNoneInput().type()); @@ -308,7 +308,7 @@ public class WatchTests extends ESTestCase { .field(ScheduleTrigger.TYPE, schedule(schedule).build()) .endObject(); builder.endObject(); - WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); + WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); Watch watch = watchParser.parse("failure", false, BytesReference.bytes(builder), XContentType.JSON); assertThat(watch, notNullValue()); assertThat(watch.trigger(), instanceOf(ScheduleTrigger.class)); @@ -322,16 +322,16 @@ public class WatchTests extends ESTestCase { public void testParseWatch_verifyScriptLangDefault() throws Exception { ScheduleRegistry scheduleRegistry = registry(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.SECONDS))); - TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(Settings.EMPTY, scheduleRegistry, Clock.systemUTC()); - TriggerService triggerService = new TriggerService(Settings.EMPTY, singleton(triggerEngine)); + TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, Clock.systemUTC()); + TriggerService triggerService = new TriggerService(singleton(triggerEngine)); ConditionRegistry conditionRegistry = conditionRegistry(); InputRegistry inputRegistry = registry(SearchInput.TYPE); TransformRegistry transformRegistry = transformRegistry(); ActionRegistry actionRegistry = 
registry(Collections.emptyList(), conditionRegistry, transformRegistry); - WatchParser watchParser = new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); + WatchParser watchParser = new WatchParser(triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); - WatcherSearchTemplateService searchTemplateService = new WatcherSearchTemplateService(settings, scriptService, xContentRegistry()); + WatcherSearchTemplateService searchTemplateService = new WatcherSearchTemplateService(scriptService, xContentRegistry()); XContentBuilder builder = jsonBuilder(); builder.startObject(); @@ -441,15 +441,15 @@ public class WatchTests extends ESTestCase { ScheduleRegistry scheduleRegistry = registry(new IntervalSchedule(new IntervalSchedule.Interval(1, IntervalSchedule.Interval.Unit.SECONDS))); - TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(Settings.EMPTY, scheduleRegistry, Clock.systemUTC()); - TriggerService triggerService = new TriggerService(Settings.EMPTY, singleton(triggerEngine)); + TriggerEngine triggerEngine = new ParseOnlyScheduleTriggerEngine(scheduleRegistry, Clock.systemUTC()); + TriggerService triggerService = new TriggerService(singleton(triggerEngine)); ConditionRegistry conditionRegistry = conditionRegistry(); InputRegistry inputRegistry = registry(SimpleInput.TYPE); TransformRegistry transformRegistry = transformRegistry(); ActionRegistry actionRegistry = registry(actions, conditionRegistry, transformRegistry); - return new WatchParser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); + return new WatchParser(triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC()); } private static Schedule randomSchedule() { @@ -645,8 +645,8 @@ public class WatchTests extends ESTestCase { public static class ParseOnlyScheduleTriggerEngine extends ScheduleTriggerEngine { - public ParseOnlyScheduleTriggerEngine(Settings settings, ScheduleRegistry registry, 
Clock clock) { - super(settings, registry, clock); + public ParseOnlyScheduleTriggerEngine(ScheduleRegistry registry, Clock clock) { + super(registry, clock); } @Override diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index c0fb7eb2b77..58d420836f9 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -237,7 +237,7 @@ subprojects { // basic integ tests includes testing bwc against the most recent version task integTest { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.snapshotsIndexCompatible) { + for (final def version : bwcVersions.unreleasedIndexCompatible) { dependsOn "v${version}#bwcTest" } } diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index 5774e5d7856..0c232e6b10b 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -125,7 +125,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest integr // basic integ tests includes testing bwc against the most recent version task integTest { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.snapshotsWireCompatible) { + for (final def version : bwcVersions.unreleasedWireCompatible) { dependsOn "v${version}#bwcTest" } } diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 90da6cf4e58..0fe80c49a94 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -281,7 +281,7 @@ subprojects { // basic integ tests includes testing bwc against the most recent version task integTest { if (project.bwc_tests_enabled) { - for (final def version : bwcVersions.snapshotsWireCompatible) { + for (final def version : bwcVersions.unreleasedWireCompatible) { dependsOn "v${version}#bwcTest" } } diff --git 
a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java index 90b5eefcb56..3f85d8086d6 100644 --- a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java +++ b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/ExampleSecurityExtension.java @@ -56,11 +56,11 @@ public class ExampleSecurityExtension implements SecurityExtension { @Override public List, ActionListener>> getRolesProviders(Settings settings, ResourceWatcherService resourceWatcherService) { - CustomInMemoryRolesProvider rp1 = new CustomInMemoryRolesProvider(settings, Collections.singletonMap(ROLE_A, "read")); + CustomInMemoryRolesProvider rp1 = new CustomInMemoryRolesProvider(Collections.singletonMap(ROLE_A, "read")); Map roles = new HashMap<>(); roles.put(ROLE_A, "all"); roles.put(ROLE_B, "all"); - CustomInMemoryRolesProvider rp2 = new CustomInMemoryRolesProvider(settings, roles); + CustomInMemoryRolesProvider rp2 = new CustomInMemoryRolesProvider(roles); return Arrays.asList(rp1, rp2); } } diff --git a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/role/CustomInMemoryRolesProvider.java b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/role/CustomInMemoryRolesProvider.java index 0d5a71e6244..44616054dc7 100644 --- a/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/role/CustomInMemoryRolesProvider.java +++ b/x-pack/qa/security-example-spi-extension/src/main/java/org/elasticsearch/example/role/CustomInMemoryRolesProvider.java @@ -7,7 +7,6 @@ package org.elasticsearch.example.role; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; @@ -30,8 +29,7 @@ public class CustomInMemoryRolesProvider private final Map rolePermissionSettings; - public CustomInMemoryRolesProvider(Settings settings, Map rolePermissionSettings) { - super(settings); + public CustomInMemoryRolesProvider(Map rolePermissionSettings) { this.rolePermissionSettings = rolePermissionSettings; } diff --git a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java index feeea248712..e9c5106d44e 100644 --- a/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java +++ b/x-pack/qa/smoke-test-watcher/src/test/java/org/elasticsearch/smoketest/WatcherTemplateIT.java @@ -41,7 +41,7 @@ public class WatcherTemplateIT extends ESTestCase { Map> contexts = Collections.singletonMap(Watcher.SCRIPT_TEMPLATE_CONTEXT.name, Watcher.SCRIPT_TEMPLATE_CONTEXT); ScriptService scriptService = new ScriptService(Settings.EMPTY, engines, contexts); - textTemplateEngine = new TextTemplateEngine(Settings.EMPTY, scriptService); + textTemplateEngine = new TextTemplateEngine(scriptService); } public void testEscaping() throws Exception {