Merge remote-tracking branch 'upstream/master' into index-lifecycle

Tal Levy 2018-11-01 09:05:41 -07:00
commit c3cf7dd305
497 changed files with 3401 additions and 2712 deletions

View File: Allocators.java

@@ -46,10 +46,6 @@ public final class Allocators {
     private static class NoopGatewayAllocator extends GatewayAllocator {
         public static final NoopGatewayAllocator INSTANCE = new NoopGatewayAllocator();
-        protected NoopGatewayAllocator() {
-            super(Settings.EMPTY);
-        }
         @Override
         public void applyStartedShards(RoutingAllocation allocation, List<ShardRouting> startedShards) {
             // noop
@@ -79,7 +75,7 @@ public final class Allocators {
     public static AllocationService createAllocationService(Settings settings, ClusterSettings clusterSettings) throws
         InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException {
-        return new AllocationService(settings,
+        return new AllocationService(
             defaultAllocationDeciders(settings, clusterSettings),
             NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(settings), EmptyClusterInfoService.INSTANCE);
     }
@@ -88,7 +84,7 @@ public final class Allocators {
         IllegalAccessException, InvocationTargetException, InstantiationException, NoSuchMethodException {
         Collection<AllocationDecider> deciders =
             ClusterModule.createAllocationDeciders(settings, clusterSettings, Collections.emptyList());
-        return new AllocationDeciders(settings, deciders);
+        return new AllocationDeciders(deciders);
     }
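
All three hunks above drop a Settings argument: the GatewayAllocator, AllocationService and AllocationDeciders constructors no longer take one in this commit. A minimal sketch of the call-site change, using hypothetical stand-ins for the Elasticsearch types (these are not the real classes; only the constructor shape shown in the diff is assumed):

    import java.util.Collection;
    import java.util.Collections;

    class AllocationDecider { }

    class AllocationDeciders {
        // before: AllocationDeciders(Settings settings, Collection<AllocationDecider> deciders)
        // after:  the Settings parameter is gone, as in the last hunk above
        AllocationDeciders(Collection<AllocationDecider> deciders) { }
    }

    public class CallSiteSketch {
        public static void main(String[] args) {
            AllocationDeciders deciders = new AllocationDeciders(Collections.emptyList());
            System.out.println("constructed without Settings: " + deciders);
        }
    }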

View File: build.gradle

@@ -103,10 +103,6 @@ subprojects {
    * in a branch if there are only betas and rcs in the branch so we have
    * *something* to test against. */
   VersionCollection versions = new VersionCollection(file('server/src/main/java/org/elasticsearch/Version.java').readLines('UTF-8'))
-  if (versions.currentVersion != VersionProperties.elasticsearch) {
-    throw new GradleException("The last version in Versions.java [${versions.currentVersion}] does not match " +
-            "VersionProperties.elasticsearch [${VersionProperties.elasticsearch}]")
-  }
   // build metadata from previous build, contains eg hashes for bwc builds
   String buildMetadataValue = System.getenv('BUILD_METADATA')
@@ -140,26 +136,16 @@ task verifyVersions {
     if (gradle.startParameter.isOffline()) {
       throw new GradleException("Must run in online mode to verify versions")
     }
-    // Read the list from maven central
-    Node xml
+    // Read the list from maven central.
+    // Fetch the metadata and parse the xml into Version instances because it's more straightforward here
+    // rather than bwcVersion ( VersionCollection ).
     new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
-      xml = new XmlParser().parse(s)
-    }
-    Set<Version> knownVersions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }.collect { Version.fromString(it) })
-    // Limit the known versions to those that should be index compatible, and are not future versions
-    knownVersions = knownVersions.findAll { it.major >= bwcVersions.currentVersion.major - 1 && it.before(VersionProperties.elasticsearch) }
-    /* Limit the listed versions to those that have been marked as released.
-     * Versions not marked as released don't get the same testing and we want
-     * to make sure that we flip all unreleased versions to released as soon
-     * as possible after release. */
-    Set<Version> actualVersions = new TreeSet<>(bwcVersions.indexCompatible.findAll { false == it.snapshot })
-    // Finally, compare!
-    if (knownVersions.equals(actualVersions) == false) {
-      throw new GradleException("out-of-date released versions\nActual :" + actualVersions + "\nExpected:" + knownVersions +
-        "\nUpdate Version.java. Note that Version.CURRENT doesn't count because it is not released.")
-    }
+      bwcVersions.compareToAuthoritative(
+        new XmlParser().parse(s)
+          .versioning.versions.version
+          .collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }
+          .collect { Version.fromString(it) }
+      )
     }
   }
 }
@@ -251,20 +237,17 @@ subprojects {
     "org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator',
     "org.elasticsearch.plugin:rank-eval-client:${version}": ':modules:rank-eval',
   ]
-  // substitute unreleased versions with projects that check out and build locally
-  bwcVersions.snapshotProjectNames.each { snapshotName ->
-    Version snapshot = bwcVersions.getSnapshotForProject(snapshotName)
-    if (snapshot != null ) {
-      String snapshotProject = ":distribution:bwc:${snapshotName}"
-      project(snapshotProject).ext.bwcVersion = snapshot
-      ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${snapshot}"] = snapshotProject
-      ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${snapshot}"] = snapshotProject
-      ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${snapshot}"] = snapshotProject
-      if (snapshot.onOrAfter('6.3.0')) {
-        ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch-oss:${snapshot}"] = snapshotProject
-        ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch-oss:${snapshot}"] = snapshotProject
-        ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch-oss:${snapshot}"] = snapshotProject
-      }
-    }
-  }
+  bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unreleasedVersion ->
+    Version unreleased = unreleasedVersion.version
+    String snapshotProject = ":distribution:bwc:${unreleasedVersion.gradleProjectName}"
+    ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${unreleased}"] = snapshotProject
+    ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${unreleased}"] = snapshotProject
+    ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${unreleased}"] = snapshotProject
+    if (unreleased.onOrAfter('6.3.0')) {
+      ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch-oss:${unreleased}"] = snapshotProject
+      ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch-oss:${unreleased}"] = snapshotProject
+      ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch-oss:${unreleased}"] = snapshotProject
+    }
+  }
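
The replacement block above hands unreleased-version bookkeeping to the new VersionCollection class added later in this diff. A sketch of the callback contract in plain Java, assuming only the UnreleasedVersionInfo fields the new class declares; the Map stands in for Gradle's ext.projectSubstitutions and is not Gradle API:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Consumer;

    public class SubstitutionSketch {
        // mirrors VersionCollection.UnreleasedVersionInfo from the new class below
        static final class UnreleasedVersionInfo {
            final String version;
            final String gradleProjectName;
            UnreleasedVersionInfo(String version, String gradleProjectName) {
                this.version = version;
                this.gradleProjectName = gradleProjectName;
            }
        }

        public static void main(String[] args) {
            Map<String, String> projectSubstitutions = new HashMap<>();
            // the shape of the closure passed to bwcVersions.forPreviousUnreleased above
            Consumer<UnreleasedVersionInfo> callback = info -> {
                String snapshotProject = ":distribution:bwc:" + info.gradleProjectName;
                projectSubstitutions.put(
                    "org.elasticsearch.distribution.zip:elasticsearch:" + info.version,
                    snapshotProject);
            };
            callback.accept(new UnreleasedVersionInfo("6.4.2-SNAPSHOT", "bugfix"));
            // prints {org.elasticsearch.distribution.zip:elasticsearch:6.4.2-SNAPSHOT=:distribution:bwc:bugfix}
            System.out.println(projectSubstitutions);
        }
    }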

View File: VersionCollection.groovy (deleted)

@@ -1,353 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle
import org.gradle.api.GradleException
import org.gradle.api.InvalidUserDataException
import java.util.regex.Matcher
/**
* The collection of version constants declared in Version.java, for use in BWC testing.
*
* if major+1 released: released artifacts from $version down to major-1.highestMinor.highestPatch, none of these should be snapshots, period.
* if major+1 unreleased:
* - if released:
* -- caveat 0: snapshot for the major-1.highestMinor.highestPatch
* - if unreleased:
* -- caveat 0: snapshot for the major-1.highestMinor.highestPatch
* -- caveat 1: every lower minor branch of the same major should also be tested if it's released, and if not, it's a snapshot. There should only be at most 2 of these.
* -- caveat 2: the largest released minor branch before the unreleased minor should also be a snapshot
* -- caveat 3: if the current version is a different major than the previous ones, the rules apply to major - 1 of the current version
*
* Please note that the caveats also correspond to the 4 types of snapshots.
* - Caveat 0 - always maintenanceBugfixSnapshot.
* - Caveat 1 - This is tricky. If caveat 3 applies, the highest matching value is nextMinorSnapshot; if there is another, it is the stagedMinorSnapshot.
* If caveat 3 does not apply then the only possible value is the stagedMinorSnapshot.
* - Caveat 2 - always nextBugfixSnapshot
* - Caveat 3 - this only changes the applicability of Caveat 1
*
* Notes on terminology:
* - The case for major+1 being released is accomplished through the isReleasableBranch value. If this is false, then the branch is no longer
* releasable, meaning we do not test against any snapshots.
* - Released is defined as having > 1 suffix-free version in a major.minor series. For instance, only 6.2.0 means unreleased, but a
* 6.2.0 and 6.2.1 mean that 6.2.0 was released already.
*/
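// A worked example of the caveats above, mirrored by the first test in the
// VersionCollectionTests.groovy file removed later in this diff: building 7.0.0
// against 5.0.0..6.3.0 (plus the 7.0.0 alphas) yields nextMinorSnapshot
// 6.3.0-SNAPSHOT, stagedMinorSnapshot 6.2.0-SNAPSHOT, nextBugfixSnapshot
// 6.1.1-SNAPSHOT, and a null maintenanceBugfixSnapshot.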
class VersionCollection {
private final List<Version> versions
Version nextMinorSnapshot
Version stagedMinorSnapshot
Version nextBugfixSnapshot
Version maintenanceBugfixSnapshot
final Version currentVersion
private final TreeSet<Version> versionSet = new TreeSet<>()
final List<String> snapshotProjectNames = ['next-minor-snapshot',
'staged-minor-snapshot',
'next-bugfix-snapshot',
'maintenance-bugfix-snapshot']
// When we roll 8.0, it's very likely these will need to be extracted from this class
private final boolean isReleasableBranch = true
/**
* Construct a VersionCollection from the lines of the Version.java file. The basic logic for the following is pretty straightforward.
* @param versionLines The lines of the Version.java file.
*/
VersionCollection(List<String> versionLines) {
final boolean buildSnapshot = System.getProperty("build.snapshot", "true") == "true"
List<Version> versions = []
// This class should be converted wholesale to use the treeset
for (final String line : versionLines) {
final Matcher match = line =~ /\W+public static final Version V_(\d+)_(\d+)_(\d+)(_alpha\d+|_beta\d+|_rc\d+)? .*/
if (match.matches()) {
final Version foundVersion = new Version(
Integer.parseInt(match.group(1)), Integer.parseInt(match.group(2)),
Integer.parseInt(match.group(3)), (match.group(4) ?: '').replace('_', '-'), false)
safeAddToSet(foundVersion)
}
}
if (versionSet.empty) {
throw new GradleException("Unexpectedly found no version constants in Versions.java")
}
// If the major version has been released, then remove all of the alpha/beta/rc versions that exist in the set
versionSet.removeAll { it.suffix.isEmpty() == false && isMajorReleased(it, versionSet) }
// set currentVersion
Version lastVersion = versionSet.last()
currentVersion = new Version(lastVersion.major, lastVersion.minor, lastVersion.revision, lastVersion.suffix, buildSnapshot)
// remove all of the potential alpha/beta/rc from the currentVersion
versionSet.removeAll {
it.suffix.isEmpty() == false &&
it.major == currentVersion.major &&
it.minor == currentVersion.minor &&
it.revision == currentVersion.revision }
// re-add the currentVersion to the set
versionSet.add(currentVersion)
if (isReleasableBranch) {
if (isReleased(currentVersion)) {
// caveat 0 - if the minor has been released then it only has a maintenance version
// go back 1 version to get the last supported snapshot version of the line, which is a maint bugfix
Version highestMinor = getHighestPreviousMinor(currentVersion.major)
maintenanceBugfixSnapshot = replaceAsSnapshot(highestMinor)
} else {
// caveat 3 - if our currentVersion is a X.0.0, we need to check X-1 minors to see if they are released
if (currentVersion.minor == 0) {
for (Version version: getMinorTips(currentVersion.major - 1)) {
if (isReleased(version) == false) {
// caveat 1 - This should only ever contain 2 non-released branches in flight. An example: 6.x is frozen
// and 6.2 is cut but not yet released; there is some simple logic to make sure that, in the case of more than 2,
// it will bail. The order is that the minor snapshot is fulfilled first, and then the staged minor snapshot
if (nextMinorSnapshot == null) {
// it has not been set yet
nextMinorSnapshot = replaceAsSnapshot(version)
} else if (stagedMinorSnapshot == null) {
stagedMinorSnapshot = replaceAsSnapshot(version)
} else {
throw new GradleException("More than 2 snapshot version existed for the next minor and staged (frozen) minors.")
}
} else {
// caveat 2 - this is the last minor snap for this major, so replace the highest (last) one of these and break
nextBugfixSnapshot = replaceAsSnapshot(version)
// we only care about the largest minor here, so in the case of 6.1 and 6.0, it will only get 6.1
break
}
}
// caveat 0 - the last supported snapshot of the line is on a version that we don't support (N-2)
maintenanceBugfixSnapshot = null
} else {
// caveat 3 did not apply. version is not a X.0.0, so we are somewhere on a X.Y line
// only check till minor == 0 of the major
for (Version version: getMinorTips(currentVersion.major)) {
if (isReleased(version) == false) {
// caveat 1 - This should only ever contain 0 or 1 branch in flight. An example: 6.x is frozen and 6.2 is cut
// but not yet released; there is some simple logic to make sure that, in the case of more than 1, it will bail
if (stagedMinorSnapshot == null) {
stagedMinorSnapshot = replaceAsSnapshot(version)
} else {
throw new GradleException("More than 1 snapshot version existed for the staged (frozen) minors.")
}
} else {
// caveat 2 - this is the last minor snap for this major, so replace the highest (last) one of these and break
nextBugfixSnapshot = replaceAsSnapshot(version)
// we only care about the largest minor here, so in the case of 6.1 and 6.0, it will only get 6.1
break
}
}
// caveat 0 - now dip back 1 version to get the last supported snapshot version of the line
Version highestMinor = getHighestPreviousMinor(currentVersion.major)
maintenanceBugfixSnapshot = replaceAsSnapshot(highestMinor)
}
}
}
this.versions = Collections.unmodifiableList(versionSet.toList())
}
/**
* @return The list of versions read from the Version.java file
*/
List<Version> getVersions() {
return versions
}
/**
* Index compat supports 1 previous entire major version. For instance, any 6.x test for this would test all of 5 up to that 6.x version
*
* @return All earlier versions that should be tested for index BWC with the current version.
*/
List<Version> getIndexCompatible() {
int actualMajor = (currentVersion.major == 5 ? 2 : currentVersion.major - 1)
return versionSet
.tailSet(Version.fromString("${actualMajor}.0.0"))
.headSet(currentVersion)
.asList()
}
/**
* Ensures the types of snapshot are not null and are also in the index compat list
*/
List<Version> getSnapshotsIndexCompatible() {
List<Version> compatSnapshots = []
List<Version> allCompatVersions = getIndexCompatible()
if (allCompatVersions.contains(nextMinorSnapshot)) {
compatSnapshots.add(nextMinorSnapshot)
}
if (allCompatVersions.contains(stagedMinorSnapshot)) {
compatSnapshots.add(stagedMinorSnapshot)
}
if (allCompatVersions.contains(nextBugfixSnapshot)) {
compatSnapshots.add(nextBugfixSnapshot)
}
if (allCompatVersions.contains(maintenanceBugfixSnapshot)) {
compatSnapshots.add(maintenanceBugfixSnapshot)
}
return compatSnapshots
}
/**
* Wire compat supports the last minor of the previous major. For instance, any 6.x test would test 5.6 up to that 6.x version
*
* @return All earlier versions that should be tested for wire BWC with the current version.
*/
List<Version> getWireCompatible() {
// Get the last minor of the previous major
Version lowerBound = getHighestPreviousMinor(currentVersion.major)
return versionSet
.tailSet(Version.fromString("${lowerBound.major}.${lowerBound.minor}.0"))
.headSet(currentVersion)
.toList()
}
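// Example from the tests below: on a 6.3 build whose highest previous minor is
// 5.2, this spans 5.2.0 up to (but excluding) the current version.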
/**
* Ensures the types of snapshot are not null and are also in the wire compat list
*/
List<Version> getSnapshotsWireCompatible() {
List<Version> compatSnapshots = []
List<Version> allCompatVersions = getWireCompatible()
if (allCompatVersions.contains(nextMinorSnapshot)) {
compatSnapshots.add(nextMinorSnapshot)
}
if (allCompatVersions.contains(stagedMinorSnapshot)) {
compatSnapshots.add(stagedMinorSnapshot)
}
if (allCompatVersions.contains(nextBugfixSnapshot)) {
compatSnapshots.add(nextBugfixSnapshot)
}
if (allCompatVersions.contains(maintenanceBugfixSnapshot)) {
compatSnapshots.add(maintenanceBugfixSnapshot)
}
// There was no wire compat for the 2.x line
compatSnapshots.removeAll {it.major == 2}
return compatSnapshots
}
/**
* Grabs the proper snapshot based on the name passed in. These names should correspond with gradle project names under bwc. If you
* are editing this if/else it is only because you added another project under :distribution:bwc. Do not modify this method or its
* reasoning for throwing the exception unless you are sure that it will not harm :distribution:bwc.
*/
Version getSnapshotForProject(String snapshotProjectName) {
if (snapshotProjectName == 'next-minor-snapshot') {
return nextMinorSnapshot
} else if (snapshotProjectName == 'staged-minor-snapshot') {
return stagedMinorSnapshot
} else if (snapshotProjectName == 'maintenance-bugfix-snapshot') {
return maintenanceBugfixSnapshot
} else if (snapshotProjectName == 'next-bugfix-snapshot') {
return nextBugfixSnapshot
} else {
throw new InvalidUserDataException("Unsupported project name ${snapshotProjectName}")
}
}
/**
* Uses basic logic about our releases to determine if this version has been previously released
*/
private boolean isReleased(Version version) {
return version.revision > 0
}
/**
* Validates that the count of non suffixed (alpha/beta/rc) versions in a given major to major+1 is greater than 1.
* This means that there is more than just a major.0.0 or major.0.0-alpha in a branch to signify it has been previously released.
*/
private boolean isMajorReleased(Version version, TreeSet<Version> items) {
return items
.tailSet(Version.fromString("${version.major}.0.0"))
.headSet(Version.fromString("${version.major + 1}.0.0"))
.count { it.suffix.isEmpty() } // count only non suffix'd versions as actual versions that may be released
.intValue() > 1
}
/**
* Gets the largest version of the previous major, based on the nextMajorVersion passed in.
* If you have a list [5.0.2, 5.1.2, 6.0.1, 6.1.1] and pass in 6 for the nextMajorVersion, it will return you 5.1.2
*/
private Version getHighestPreviousMinor(Integer nextMajorVersion) {
SortedSet<Version> result = versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0"))
return result.isEmpty() ? null : result.last()
}
/**
* Helper function for turning a version into a snapshot version, removing and re-adding it to the tree
*/
private Version replaceAsSnapshot(Version version) {
versionSet.remove(version)
Version snapshotVersion = new Version(version.major, version.minor, version.revision, version.suffix, true)
safeAddToSet(snapshotVersion)
return snapshotVersion
}
/**
* Safely adds a value to the treeset, or bails if the value already exists.
* @param version
*/
private void safeAddToSet(Version version) {
if (versionSet.add(version) == false) {
throw new GradleException("Versions.java contains duplicate entries for ${version}")
}
}
/**
* Gets the entire set of major.minor.* given those parameters.
*/
private SortedSet<Version> getMinorSetForMajor(Integer major, Integer minor) {
return versionSet
.tailSet(Version.fromString("${major}.${minor}.0"))
.headSet(Version.fromString("${major}.${minor + 1}.0"))
}
/**
* Gets the entire set of major.* to the currentVersion
*/
private SortedSet<Version> getMajorSet(Integer major) {
return versionSet
.tailSet(Version.fromString("${major}.0.0"))
.headSet(currentVersion)
}
/**
* Gets the tip of each minor set and puts it in a list.
*
* examples:
* [1.0.0, 1.1.0, 1.1.1, 1.2.0, 1.3.1] will return [1.0.0, 1.1.1, 1.2.0, 1.3.1]
* [1.0.0, 1.0.1, 1.0.2, 1.0.3, 1.0.4] will return [1.0.4]
*/
private List<Version> getMinorTips(Integer major) {
TreeSet<Version> majorSet = getMajorSet(major)
List<Version> minorList = new ArrayList<>()
for (int minor = majorSet.last().minor; minor >= 0; minor--) {
TreeSet<Version> minorSetInMajor = getMinorSetForMajor(major, minor)
minorList.add(minorSetInMajor.last())
}
return minorList
}
}

View File: Version.java

@@ -21,6 +21,10 @@ public final class Version implements Comparable<Version> {
     private static final Pattern pattern =
             Pattern.compile("(\\d)+\\.(\\d+)\\.(\\d+)(-alpha\\d+|-beta\\d+|-rc\\d+)?(-SNAPSHOT)?");
+    public Version(int major, int minor, int revision) {
+        this(major, minor, revision, "", false);
+    }
+
     public Version(int major, int minor, int revision, String suffix, boolean snapshot) {
         Objects.requireNonNull(major, "major version can't be null");
         Objects.requireNonNull(minor, "minor version can't be null");
@@ -31,25 +35,8 @@ public final class Version implements Comparable<Version> {
         this.snapshot = snapshot;
         this.suffix = suffix == null ? "" : suffix;
-        int suffixOffset = 0;
-        if (this.suffix.isEmpty()) {
-            // no suffix will be considered smaller, uncomment to change that
-            // suffixOffset = 100;
-        } else {
-            if (this.suffix.contains("alpha")) {
-                suffixOffset += parseSuffixNumber(this.suffix.substring(6));
-            } else if (this.suffix.contains("beta")) {
-                suffixOffset += 25 + parseSuffixNumber(this.suffix.substring(5));
-            } else if (this.suffix.contains("rc")) {
-                suffixOffset += 50 + parseSuffixNumber(this.suffix.substring(3));
-            }
-            else {
-                throw new IllegalArgumentException("Suffix must contain one of: alpha, beta or rc");
-            }
-        }
         // currently snapshot is not taken into account
-        this.id = major * 10000000 + minor * 100000 + revision * 1000 + suffixOffset * 10 /*+ (snapshot ? 1 : 0)*/;
+        this.id = major * 10000000 + minor * 100000 + revision * 1000;
     }
     private static int parseSuffixNumber(String substring) {
@@ -136,10 +123,7 @@ public final class Version implements Comparable<Version> {
         Version version = (Version) o;
         return major == version.major &&
                 minor == version.minor &&
-                revision == version.revision &&
-                id == version.id &&
-                snapshot == version.snapshot &&
-                Objects.equals(suffix, version.suffix);
+                revision == version.revision;
     }
     @Override
@@ -176,4 +160,5 @@ public final class Version implements Comparable<Version> {
     public int compareTo(Version other) {
         return Integer.compare(getId(), other.getId());
     }
 }
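
With suffixOffset gone, the id packs only major, minor and revision into fixed decimal fields, so ordering by id is exactly ordering by (major, minor, revision); a 7.0.0-alpha1 and a 7.0.0 now get the same id, which is consistent with the relaxed equals() above. A quick plain-Java check of the arithmetic (an illustration, not Elasticsearch code):

    public class VersionIdSketch {
        public static void main(String[] args) {
            int major = 6, minor = 4, revision = 2;
            // id = major * 10000000 + minor * 100000 + revision * 1000, as in the hunk above
            int id = major * 10000000 + minor * 100000 + revision * 1000;
            System.out.println(id); // prints 60402000
        }
    }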

View File: VersionCollection.java (new)

@@ -0,0 +1,341 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableList;
/**
* A container for elasticsearch supported version information used in BWC testing.
*
* Parse the Java source file containing the version declarations and use the known rules to figure out all the
* versions the current one is wire and index compatible with.
* On top of this, figure out which of these are unreleased and provide the branch they can be built from.
*
* Note that in this context, currentVersion is the unreleased version this build operates on.
* At any point in time there will typically be four such unreleased versions being worked on,
* and currentVersion will be one of these.
*
* Considering:
* <dl>
* <dt>M, M &gt; 0</dt>
* <dd>last released major</dd>
* <dt>N, N &gt; 0</dt>
* <dd>last released minor</dd>
* </dl>
*
* <ul>
* <li>the unreleased <b>major</b>, M+1.0.0 on the `master` branch</li>
* <li>the unreleased <b>minor</b>, M.N.0 on the `M.x` (x is literal) branch</li>
* <li>the unreleased <b>bugfix</b>, M.N.c (c &gt; 0) on the `M.b` branch</li>
* <li>the unreleased <b>maintenance</b>, M-1.d.e ( d &gt; 0, e &gt; 0) on the `(M-1).d` branch</li>
* </ul>
* In addition to these, there will be a fifth one when a minor reaches feature freeze; we call this the <i>staged</i>
* version:
* <ul>
* <li>the unreleased <b>staged</b>, M.N-2.0 (N &gt; 2) on the `M.(N-2)` branch</li>
* </ul>
*
* Each build is only concerned with versions before it, as those are the ones that need to be tested
* for backwards compatibility. We never look forward, and don't add forward-facing version numbers to branches of
* previous versions.
*
* Each branch has a current version, and expected compatible versions are parsed from the server code's Version class.
* We can reliably figure out which the unreleased versions are due to the convention of always adding the next unreleased
* version number to server in all branches when a version is released.
* E.g. when M.N.c is released, M.N.c+1 is added to the Version class mentioned above in all the following branches:
* `M.b`, `M.x` and `master`, so we can reliably assume that the leaves of the version tree are unreleased.
* This convention is enforced by checking the versions we consider to be unreleased against an
* authoritative source (maven central).
* We are then able to map the unreleased version to branches in git and Gradle projects that are capable of checking
* out and building them, so we can include these in the testing plan as well.
*/
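// For a concrete mapping, taken from the VersionCollectionTests.java added later
// in this diff: on an 8.0.0 build the unreleased set is 7.1.1 (bugfix, branch 7.1),
// 7.2.0 (staged, branch 7.2), 7.3.0 (minor, branch 7.x) and 8.0.0 itself.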
public class VersionCollection {
private static final Pattern LINE_PATTERN = Pattern.compile(
"\\W+public static final Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)? .*"
);
private final Version currentVersion;
private final Map<Integer, List<Version>> groupByMajor;
public class UnreleasedVersionInfo {
public final Version version;
public final String branch;
public final String gradleProjectName;
UnreleasedVersionInfo(Version version, String branch, String gradleProjectName) {
this.version = version;
this.branch = branch;
this.gradleProjectName = gradleProjectName;
}
}
public VersionCollection(List<String> versionLines) {
this(versionLines, VersionProperties.getElasticsearch());
}
protected VersionCollection(List<String> versionLines, Version currentVersionProperty) {
groupByMajor = versionLines.stream()
.map(LINE_PATTERN::matcher)
.filter(Matcher::matches)
.map(match -> new Version(
Integer.parseInt(match.group(1)),
Integer.parseInt(match.group(2)),
Integer.parseInt(match.group(3)),
(match.group(4) == null ? "" : match.group(4)).replace('_', '-'),
false
))
.sorted()
.filter(version -> version.getSuffix().isEmpty() || version.equals(currentVersionProperty))
.collect(Collectors.groupingBy(Version::getMajor, Collectors.toList()));
if (groupByMajor.isEmpty()) {
throw new IllegalArgumentException("Could not parse any versions");
}
currentVersion = getLatestVersionByKey(
groupByMajor,
groupByMajor.keySet().stream().max(Integer::compareTo)
.orElseThrow(() -> new IllegalStateException("Unexpected number of versions in collection"))
);
assertCurrentVersionMatchesParsed(currentVersionProperty);
assertNoOlderThanTwoMajors();
markUnreleasedAsSnapshot();
}
private void markUnreleasedAsSnapshot() {
getUnreleased().forEach(uv ->
groupByMajor.get(uv.getMajor()).set(
groupByMajor.get(uv.getMajor()).indexOf(uv),
new Version(uv.getMajor(), uv.getMinor(), uv.getRevision(), uv.getSuffix(), true)
)
);
}
private void assertNoOlderThanTwoMajors() {
Set<Integer> majors = groupByMajor.keySet();
if (majors.size() != 2 && currentVersion.getMinor() != 0 && currentVersion.getMajor() != 0) {
throw new IllegalStateException(
"Expected exactly 2 majors in parsed versions but found: " + majors
);
}
}
private void assertCurrentVersionMatchesParsed(Version currentVersionProperty) {
if (currentVersionProperty.equals(currentVersion) == false) {
throw new IllegalStateException(
"Parsed versions latest version does not match the one configured in build properties. " +
"Parsed latest version is " + currentVersion + " but the build has " +
currentVersionProperty
);
}
}
public void forPreviousUnreleased(Consumer<UnreleasedVersionInfo> consumer) {
getUnreleased().stream()
.filter(version -> version.equals(currentVersion) == false)
.forEach(version -> consumer.accept(
new UnreleasedVersionInfo(
version,
getBranchFor(version),
getGradleProjectNameFor(version)
)
));
}
private String getGradleProjectNameFor(Version version) {
if (version.equals(currentVersion)) {
throw new IllegalArgumentException("The Gradle project to build " + version + " is the current build.");
}
Map<Integer, List<Version>> releasedMajorGroupedByMinor = getReleasedMajorGroupedByMinor();
if (version.getRevision() == 0) {
if (releasedMajorGroupedByMinor
.get(releasedMajorGroupedByMinor.keySet().stream().max(Integer::compareTo).orElse(0))
.contains(version)) {
return "minor";
} else {
return "staged";
}
} else {
if (releasedMajorGroupedByMinor
.getOrDefault(version.getMinor(), emptyList())
.contains(version)) {
return "bugfix";
} else {
return "maintenance";
}
}
}
private String getBranchFor(Version version) {
switch (getGradleProjectNameFor(version)) {
case "minor":
return version.getMajor() + ".x";
case "staged":
case "maintenance":
case "bugfix":
return version.getMajor() + "." + version.getMinor();
default:
throw new IllegalStateException("Unexpected Gradle project name");
}
}
public List<Version> getUnreleased() {
List<Version> unreleased = new ArrayList<>();
// The current version is being worked on, so it is always unreleased
unreleased.add(currentVersion);
// the tip of the previous major is unreleased for sure, be it a minor or a bugfix
unreleased.add(getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1));
final Map<Integer, List<Version>> groupByMinor = getReleasedMajorGroupedByMinor();
int greatestMinor = groupByMinor.keySet().stream().max(Integer::compareTo).orElse(0);
// the last bugfix for this minor series is always unreleased
unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor));
if (groupByMinor.get(greatestMinor).size() == 1) {
// we found an unreleased minor
unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor - 1));
if (groupByMinor.getOrDefault(greatestMinor - 1, emptyList()).size() == 1) {
// we found that the previous minor is staged but not yet released
// in this case, the minor before that has a bugfix
unreleased.add(getLatestVersionByKey(groupByMinor, greatestMinor - 2));
}
}
return unmodifiableList(
unreleased.stream()
.sorted()
.distinct()
.collect(Collectors.toList())
);
}
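// Example from testGetUnreleased below: for currentVersion 6.6.0 this returns
// [5.6.13, 6.4.2, 6.5.0, 6.6.0], i.e. the maintenance, bugfix and staged versions
// plus the current one.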
private Version getLatestVersionByKey(Map<Integer, List<Version>> groupByMajor, int key) {
return groupByMajor.getOrDefault(key, emptyList()).stream()
.max(Version::compareTo)
.orElseThrow(() -> new IllegalStateException("Unexpected number of versions in collection"));
}
private Map<Integer, List<Version>> getReleasedMajorGroupedByMinor() {
List<Version> currentMajorVersions = groupByMajor.get(currentVersion.getMajor());
List<Version> previousMajorVersions = groupByMajor.get(currentVersion.getMajor() - 1);
final Map<Integer, List<Version>> groupByMinor;
if (currentMajorVersions.size() == 1) {
// Current is an unreleased major: x.0.0 so we have to look for other unreleased versions in the previous major
groupByMinor = previousMajorVersions.stream()
.collect(Collectors.groupingBy(Version::getMinor, Collectors.toList()));
} else {
groupByMinor = currentMajorVersions.stream()
.collect(Collectors.groupingBy(Version::getMinor, Collectors.toList()));
}
return groupByMinor;
}
public void compareToAuthoritative(List<Version> authoritativeReleasedVersions) {
Set<Version> notReallyReleased = new HashSet<>(getReleased());
notReallyReleased.removeAll(authoritativeReleasedVersions);
if (notReallyReleased.isEmpty() == false) {
throw new IllegalStateException(
"out-of-date released versions" +
"\nFollowing versions are not really released, but the build thinks they are: " + notReallyReleased
);
}
Set<Version> incorrectlyConsideredUnreleased = new HashSet<>(authoritativeReleasedVersions);
incorrectlyConsideredUnreleased.retainAll(getUnreleased());
if (incorrectlyConsideredUnreleased.isEmpty() == false) {
throw new IllegalStateException(
"out-of-date released versions" +
"\nBuild considers versions unreleased, " +
"but they are released according to an authoritative source: " + incorrectlyConsideredUnreleased +
"\nThe next versions probably needs to be added to Version.java (CURRENT doesn't count)."
);
}
}
private List<Version> getReleased() {
List<Version> unreleased = getUnreleased();
return groupByMajor.values().stream()
.flatMap(Collection::stream)
.filter(each -> unreleased.contains(each) == false)
.collect(Collectors.toList());
}
public List<Version> getIndexCompatible() {
return unmodifiableList(
Stream.concat(
groupByMajor.get(currentVersion.getMajor() - 1).stream(),
groupByMajor.get(currentVersion.getMajor()).stream()
)
.filter(version -> version.equals(currentVersion) == false)
.collect(Collectors.toList())
);
}
public List<Version> getWireCompatible() {
List<Version> wireCompat = new ArrayList<>();
List<Version> prevMajors = groupByMajor.get(currentVersion.getMajor() - 1);
int minor = prevMajors.get(prevMajors.size() - 1).getMinor();
for (int i = prevMajors.size() - 1;
i > 0 && prevMajors.get(i).getMinor() == minor;
i--
) {
wireCompat.add(prevMajors.get(i));
}
wireCompat.addAll(groupByMajor.get(currentVersion.getMajor()));
wireCompat.remove(currentVersion);
wireCompat.sort(Version::compareTo);
return unmodifiableList(wireCompat);
}
public List<Version> getUnreleasedIndexCompatible() {
List<Version> unreleasedIndexCompatible = new ArrayList<>(getIndexCompatible());
unreleasedIndexCompatible.retainAll(getUnreleased());
return unmodifiableList(unreleasedIndexCompatible);
}
public List<Version> getUnreleasedWireCompatible() {
List<Version> unreleasedWireCompatible = new ArrayList<>(getWireCompatible());
unreleasedWireCompatible.retainAll(getUnreleased());
return unmodifiableList(unreleasedWireCompatible);
}
}

View File: VersionCollectionTests.groovy (deleted)

@@ -1,236 +0,0 @@
package org.elasticsearch.gradle
import org.elasticsearch.gradle.test.GradleUnitTestCase
import org.junit.Test
class VersionCollectionTests extends GradleUnitTestCase {
String formatVersion(String version) {
return " public static final Version V_${version.replaceAll("\\.", "_")} "
}
List<String> allVersions = [formatVersion('5.0.0'), formatVersion('5.0.0_alpha1'), formatVersion('5.0.0_alpha2'), formatVersion('5.0.0_beta1'),
formatVersion('5.0.0_rc1'),formatVersion('5.0.0_rc2'),formatVersion('5.0.1'), formatVersion('5.0.2'),
formatVersion('5.1.1'), formatVersion('5.1.2'), formatVersion('5.2.0'), formatVersion('5.2.1'), formatVersion('6.0.0'),
formatVersion('6.0.1'), formatVersion('6.1.0'), formatVersion('6.1.1'), formatVersion('6.2.0'), formatVersion('6.3.0'),
formatVersion('7.0.0_alpha1'), formatVersion('7.0.0_alpha2')]
/**
* This validates the logic of being on an unreleased major branch with a staged major-1.minor sibling. This case happens when a version is
* branched from Major-1.x. At the time of this writing, 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior
* from the 7.0 perspective, or master at the time of this writing.
*/
@Test
void testAgainstMajorUnreleasedWithExistingStagedMinorRelease() {
VersionCollection vc = new VersionCollection(allVersions)
assertNotNull(vc)
assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT"))
assertEquals(vc.stagedMinorSnapshot, Version.fromString("6.2.0-SNAPSHOT"))
assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.1.1-SNAPSHOT"))
assertNull(vc.maintenanceBugfixSnapshot)
vc.indexCompatible.containsAll(vc.versions)
// This should contain the same list sans the current version
List indexCompatList = [Version.fromString("6.0.0"), Version.fromString("6.0.1"),
Version.fromString("6.1.0"), Version.fromString("6.1.1-SNAPSHOT"),
Version.fromString("6.2.0-SNAPSHOT"), Version.fromString("6.3.0-SNAPSHOT")]
assertTrue(indexCompatList.containsAll(vc.indexCompatible))
assertTrue(vc.indexCompatible.containsAll(indexCompatList))
List wireCompatList = [Version.fromString("6.3.0-SNAPSHOT")]
assertTrue(wireCompatList.containsAll(vc.wireCompatible))
assertTrue(vc.wireCompatible.containsAll(wireCompatList))
assertEquals(vc.snapshotsIndexCompatible.size(), 3)
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.3.0-SNAPSHOT")))
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.0-SNAPSHOT")))
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.1.1-SNAPSHOT")))
assertEquals(vc.snapshotsWireCompatible.size(), 1)
assertEquals(vc.snapshotsWireCompatible.first(), Version.fromString("6.3.0-SNAPSHOT"))
}
/**
* This validates the logic of being on an unreleased major branch without a staged major-1.minor sibling. This case happens once a staged,
* unreleased minor is released. At the time of this writing, 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test
* simulates the behavior from the 7.0 perspective, or master at the time of this writing.
*/
@Test
void testAgainstMajorUnreleasedWithoutStagedMinorRelease() {
List localVersion = allVersions.clone()
localVersion.add(formatVersion('6.2.1')) // release 6.2
VersionCollection vc = new VersionCollection(localVersion)
assertNotNull(vc)
assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT"))
assertEquals(vc.stagedMinorSnapshot, null)
assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.2.1-SNAPSHOT"))
assertNull(vc.maintenanceBugfixSnapshot)
vc.indexCompatible.containsAll(vc.versions)
// This should contain the same list sans the current version
List indexCompatList = [Version.fromString("6.0.0"), Version.fromString("6.0.1"),
Version.fromString("6.1.0"), Version.fromString("6.1.1"),
Version.fromString("6.2.0"), Version.fromString("6.2.1-SNAPSHOT"),
Version.fromString("6.3.0-SNAPSHOT")]
assertTrue(indexCompatList.containsAll(vc.indexCompatible))
assertTrue(vc.indexCompatible.containsAll(indexCompatList))
List wireCompatList = [Version.fromString("6.3.0-SNAPSHOT")]
assertTrue(wireCompatList.containsAll(vc.wireCompatible))
assertTrue(vc.wireCompatible.containsAll(wireCompatList))
assertEquals(vc.snapshotsIndexCompatible.size(), 2)
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.3.0-SNAPSHOT")))
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.1-SNAPSHOT")))
assertEquals(vc.snapshotsWireCompatible.size(), 1)
assertEquals(vc.snapshotsWireCompatible.first(), Version.fromString("6.3.0-SNAPSHOT"))
}
/**
* This validates the logic of being on an unreleased minor branch with a staged minor sibling. This case happens when a version is
* branched from Major.x. At the time of this writing, 6.2 is unreleased and 6.3 is the 6.x branch. This test simulates the behavior
* from the 6.3 perspective.
*/
@Test
void testAgainstMinorReleasedBranch() {
List localVersion = allVersions.clone()
localVersion.removeAll { it.toString().contains('7_0_0')} // remove all the 7.x so that the actual version is 6.3 (6.x)
VersionCollection vc = new VersionCollection(localVersion)
assertNotNull(vc)
assertEquals(vc.nextMinorSnapshot, null)
assertEquals(vc.stagedMinorSnapshot, Version.fromString("6.2.0-SNAPSHOT"))
assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.1.1-SNAPSHOT"))
assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT"))
// This should contain the same list sans the current version
List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1)
assertTrue(indexCompatList.containsAll(vc.indexCompatible))
assertTrue(vc.indexCompatible.containsAll(indexCompatList))
List wireCompatList = [Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"),
Version.fromString("6.0.1"), Version.fromString("6.1.0"), Version.fromString("6.1.1-SNAPSHOT"),
Version.fromString("6.2.0-SNAPSHOT")]
assertTrue(wireCompatList.containsAll(vc.wireCompatible))
assertTrue(vc.wireCompatible.containsAll(wireCompatList))
assertEquals(vc.snapshotsIndexCompatible.size(), 3)
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.0-SNAPSHOT")))
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.1.1-SNAPSHOT")))
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("5.2.1-SNAPSHOT")))
assertEquals(vc.snapshotsWireCompatible.size(), 3)
assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("6.2.0-SNAPSHOT")))
assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("6.1.1-SNAPSHOT")))
assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("5.2.1-SNAPSHOT")))
}
/**
* This validates the logic of being on an unreleased minor branch without a staged minor sibling. This case happens once a staged,
* unreleased minor is released. At the time of this writing, 6.2 is unreleased, so adding a 6.2.1 simulates a 6.2 release. This test
* simulates the behavior from the 6.3 perspective.
*/
@Test
void testAgainstMinorReleasedBranchNoStagedMinor() {
List localVersion = allVersions.clone()
// remove all the 7.x and add a 6.2.1 which means 6.2 was released
localVersion.removeAll { it.toString().contains('7_0_0')}
localVersion.add(formatVersion('6.2.1'))
VersionCollection vc = new VersionCollection(localVersion)
assertNotNull(vc)
assertEquals(vc.nextMinorSnapshot, null)
assertEquals(vc.stagedMinorSnapshot, null)
assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.2.1-SNAPSHOT"))
assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT"))
// This should contain the same list sans the current version
List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1)
assertTrue(indexCompatList.containsAll(vc.indexCompatible))
assertTrue(vc.indexCompatible.containsAll(indexCompatList))
List wireCompatList = [Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"),
Version.fromString("6.0.1"), Version.fromString("6.1.0"), Version.fromString("6.1.1"),
Version.fromString("6.2.0"), Version.fromString("6.2.1-SNAPSHOT")]
assertTrue(wireCompatList.containsAll(vc.wireCompatible))
assertTrue(vc.wireCompatible.containsAll(wireCompatList))
assertEquals(vc.snapshotsIndexCompatible.size(), 2)
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("6.2.1-SNAPSHOT")))
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("5.2.1-SNAPSHOT")))
assertEquals(vc.snapshotsWireCompatible.size(), 2)
assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("6.2.1-SNAPSHOT")))
assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("5.2.1-SNAPSHOT")))
}
/**
* This validates the logic of being on a released minor branch. At the time of writing, 6.2 is unreleased, so this is the equivalent of being
* on 6.1.
*/
@Test
void testAgainstOldMinor() {
List localVersion = allVersions.clone()
// remove the 7 alphas and the ones greater than 6.1
localVersion.removeAll { it.toString().contains('7_0_0') || it.toString().contains('V_6_2') || it.toString().contains('V_6_3') }
VersionCollection vc = new VersionCollection(localVersion)
assertNotNull(vc)
assertEquals(vc.nextMinorSnapshot, null)
assertEquals(vc.stagedMinorSnapshot, null)
assertEquals(vc.nextBugfixSnapshot, null)
assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT"))
// This should contain the same list sans the current version
List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1)
assertTrue(indexCompatList.containsAll(vc.indexCompatible))
assertTrue(vc.indexCompatible.containsAll(indexCompatList))
List wireCompatList = [Version.fromString("5.2.0"), Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("6.0.0"),
Version.fromString("6.0.1"), Version.fromString("6.1.0")]
assertTrue(wireCompatList.containsAll(vc.wireCompatible))
assertTrue(vc.wireCompatible.containsAll(wireCompatList))
assertEquals(vc.snapshotsIndexCompatible.size(), 1)
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("5.2.1-SNAPSHOT")))
assertEquals(vc.snapshotsWireCompatible.size(), 1)
assertTrue(vc.snapshotsWireCompatible.contains(Version.fromString("5.2.1-SNAPSHOT")))
}
/**
* This validates the lower bound of wire compat, which is 5.0. It also validates the span of 2.x to 5.x, in case it is decided to port
* this fix all the way to the maintenance 5.6 release.
*/
@Test
void testFloorOfWireCompatVersions() {
List localVersion = [formatVersion('2.0.0'), formatVersion('2.0.1'), formatVersion('2.1.0'), formatVersion('2.1.1'),
formatVersion('5.0.0'), formatVersion('5.0.1'), formatVersion('5.1.0'), formatVersion('5.1.1'),
formatVersion('5.2.0'),formatVersion('5.2.1'),formatVersion('5.3.0'),formatVersion('5.3.1'),
formatVersion('5.3.2')]
VersionCollection vc = new VersionCollection(localVersion)
assertNotNull(vc)
assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("2.1.1-SNAPSHOT"))
// This should contain the same list sans the current version
List indexCompatList = vc.versions.subList(0, vc.versions.size() - 1)
assertTrue(indexCompatList.containsAll(vc.indexCompatible))
assertTrue(vc.indexCompatible.containsAll(indexCompatList))
List wireCompatList = [Version.fromString("2.1.0"), Version.fromString("2.1.1-SNAPSHOT"), Version.fromString("5.0.0"),
Version.fromString("5.0.1"), Version.fromString("5.1.0"),
Version.fromString("5.1.1"), Version.fromString("5.2.0"), Version.fromString("5.2.1"),
Version.fromString("5.3.0"), Version.fromString("5.3.1")]
List<Version> compatible = vc.wireCompatible
assertTrue(wireCompatList.containsAll(compatible))
assertTrue(vc.wireCompatible.containsAll(wireCompatList))
assertEquals(vc.snapshotsIndexCompatible.size(), 1)
assertTrue(vc.snapshotsIndexCompatible.contains(Version.fromString("2.1.1-SNAPSHOT")))
// ensure none of the 2.x snapshots appear here, as this is the floor of bwc for wire compat
assertEquals(vc.snapshotsWireCompatible.size(), 0)
}
}

View File: VersionCollectionTests.java (new)

@@ -0,0 +1,406 @@
package org.elasticsearch.gradle;
import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
public class VersionCollectionTests extends GradleUnitTestCase {
private static final Map<String, List<String>> sampleVersions = new HashMap<>();
@Rule
public ExpectedException expectedEx = ExpectedException.none();
static {
// unreleased major and two unreleased minors (minor in feature freeze)
sampleVersions.put("8.0.0", asList(
"7_0_0", "7_0_1", "7_1_0", "7_1_1", "7_2_0", "7_3_0", "8.0.0"
));
sampleVersions.put("7.0.0-alpha1", asList(
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2",
"6_0_0", "6_0_1", "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4",
"6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4",
"6_3_0", "6_3_1", "6_3_2",
"6_4_0", "6_4_1", "6_4_2",
"6_5_0", "7_0_0_alpha1"
));
sampleVersions.put("6.5.0", asList(
"5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1",
"5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0", "5_3_1", "5_3_2", "5_3_3",
"5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3", "5_6_0", "5_6_1", "5_6_2", "5_6_3",
"5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10", "5_6_11", "5_6_12", "5_6_13",
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", "6_0_0", "6_0_1",
"6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4", "6_3_0", "6_3_1",
"6_3_2", "6_4_0", "6_4_1", "6_4_2", "6_5_0"
));
sampleVersions.put("6.6.0", asList(
"5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1",
"5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0", "5_3_1", "5_3_2", "5_3_3",
"5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3", "5_6_0", "5_6_1", "5_6_2", "5_6_3",
"5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10", "5_6_11", "5_6_12", "5_6_13",
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2", "6_0_0", "6_0_1",
"6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3", "6_2_4", "6_3_0", "6_3_1",
"6_3_2", "6_4_0", "6_4_1", "6_4_2", "6_5_0", "6_6_0"
));
sampleVersions.put("6.4.2", asList(
"5_0_0_alpha1", "5_0_0_alpha2", "5_0_0_alpha3", "5_0_0_alpha4", "5_0_0_alpha5", "5_0_0_beta1", "5_0_0_rc1",
"5_0_0", "5_0_1", "5_0_2", "5_1_1", "5_1_2", "5_2_0", "5_2_1", "5_2_2", "5_3_0",
"5_3_1", "5_3_2", "5_3_3", "5_4_0", "5_4_1", "5_4_2", "5_4_3", "5_5_0", "5_5_1", "5_5_2", "5_5_3",
"5_6_0", "5_6_1", "5_6_2", "5_6_3", "5_6_4", "5_6_5", "5_6_6", "5_6_7", "5_6_8", "5_6_9", "5_6_10",
"5_6_11", "5_6_12", "5_6_13",
"6_0_0_alpha1", "6_0_0_alpha2", "6_0_0_beta1", "6_0_0_beta2", "6_0_0_rc1", "6_0_0_rc2",
"6_0_0", "6_0_1", "6_1_0", "6_1_1", "6_1_2", "6_1_3", "6_1_4", "6_2_0", "6_2_1", "6_2_2", "6_2_3",
"6_2_4", "6_3_0", "6_3_1", "6_3_2", "6_4_0", "6_4_1", "6_4_2"
));
}
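// Note: getVersionCollection and formatVersionToLine, used throughout the tests
// below, are defined later in this file, past the end of this excerpt; judging from
// their call sites and the Groovy helper they replace, formatVersionToLine("6.5.0")
// presumably renders a Version.java-style declaration line for V_6_5_0.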
@Test(expected = IllegalArgumentException.class)
public void testExceptionOnEmpty() {
new VersionCollection(asList("foo", "bar"), Version.fromString("7.0.0"));
}
@Test(expected = IllegalStateException.class)
public void testExceptionOnNonCurrent() {
new VersionCollection(singletonList(formatVersionToLine("6.5.0")), Version.fromString("7.0.0"));
}
@Test(expected = IllegalStateException.class)
public void testExceptionOnTooManyMajors() {
new VersionCollection(
asList(
formatVersionToLine("5.6.12"),
formatVersionToLine("6.5.0"),
formatVersionToLine("7.0.0")
),
Version.fromString("7.0.0")
);
}
public void testWireCompatible() {
assertVersionsEquals(
singletonList("6.5.0-SNAPSHOT"),
getVersionCollection("7.0.0-alpha1").getWireCompatible()
);
assertVersionsEquals(
asList(
"5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10",
"5.6.11", "5.6.12", "5.6.13-SNAPSHOT",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT"
),
getVersionCollection("6.5.0").getWireCompatible()
);
assertVersionsEquals(
asList(
"5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10",
"5.6.11", "5.6.12", "5.6.13-SNAPSHOT", "6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", "6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1"
),
getVersionCollection("6.4.2").getWireCompatible()
);
assertVersionsEquals(
asList(
"5.6.0", "5.6.1", "5.6.2", "5.6.3", "5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10",
"5.6.11", "5.6.12", "5.6.13-SNAPSHOT",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"
),
getVersionCollection("6.6.0").getWireCompatible()
);
assertVersionsEquals(
singletonList("7.3.0"),
getVersionCollection("8.0.0").getWireCompatible()
);
}
public void testWireCompatibleUnreleased() {
assertVersionsEquals(
singletonList("6.5.0-SNAPSHOT"),
getVersionCollection("7.0.0-alpha1").getUnreleasedWireCompatible()
);
assertVersionsEquals(
asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT"),
getVersionCollection("6.5.0").getUnreleasedWireCompatible()
);
assertVersionsEquals(
singletonList("5.6.13-SNAPSHOT"),
getVersionCollection("6.4.2").getUnreleasedWireCompatible()
);
assertVersionsEquals(
asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"),
getVersionCollection("6.6.0").getUnreleasedWireCompatible()
);
assertVersionsEquals(
singletonList("7.3.0"),
getVersionCollection("8.0.0").getUnreleasedWireCompatible()
);
}
public void testIndexCompatible() {
assertVersionsEquals(
asList(
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4",
"6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4", "6.3.0", "6.3.1",
"6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"
),
getVersionCollection("7.0.0-alpha1").getIndexCompatible()
);
assertVersionsEquals(
asList(
"5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3",
"5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3",
"5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13-SNAPSHOT",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT"
),
getVersionCollection("6.5.0").getIndexCompatible()
);
assertVersionsEquals(
asList(
"5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3",
"5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3",
"5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13-SNAPSHOT",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1"
),
getVersionCollection("6.4.2").getIndexCompatible()
);
assertVersionsEquals(
asList(
"5.0.0", "5.0.1", "5.0.2", "5.1.1", "5.1.2", "5.2.0", "5.2.1", "5.2.2", "5.3.0", "5.3.1", "5.3.2", "5.3.3",
"5.4.0", "5.4.1", "5.4.2", "5.4.3", "5.5.0", "5.5.1", "5.5.2", "5.5.3", "5.6.0", "5.6.1", "5.6.2", "5.6.3",
"5.6.4", "5.6.5", "5.6.6", "5.6.7", "5.6.8", "5.6.9", "5.6.10", "5.6.11", "5.6.12", "5.6.13-SNAPSHOT",
"6.0.0", "6.0.1", "6.1.0", "6.1.1", "6.1.2", "6.1.3", "6.1.4", "6.2.0", "6.2.1", "6.2.2", "6.2.3", "6.2.4",
"6.3.0", "6.3.1", "6.3.2", "6.4.0", "6.4.1", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"
),
getVersionCollection("6.6.0").getIndexCompatible()
);
assertVersionsEquals(
asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0"),
getVersionCollection("8.0.0").getIndexCompatible()
);
}
public void testIndexCompatibleUnreleased() {
assertVersionsEquals(
asList("6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"),
getVersionCollection("7.0.0-alpha1").getUnreleasedIndexCompatible()
);
assertVersionsEquals(
asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT"),
getVersionCollection("6.5.0").getUnreleasedIndexCompatible()
);
assertVersionsEquals(
singletonList("5.6.13-SNAPSHOT"),
getVersionCollection("6.4.2").getUnreleasedIndexCompatible()
);
assertVersionsEquals(
asList("5.6.13-SNAPSHOT", "6.4.2-SNAPSHOT", "6.5.0-SNAPSHOT"),
getVersionCollection("6.6.0").getUnreleasedIndexCompatible()
);
assertVersionsEquals(
asList("7.1.1", "7.2.0", "7.3.0"),
getVersionCollection("8.0.0").getUnreleasedIndexCompatible()
);
}
public void testGetUnreleased() {
assertVersionsEquals(
asList("6.4.2", "6.5.0", "7.0.0-alpha1"),
getVersionCollection("7.0.0-alpha1").getUnreleased()
);
assertVersionsEquals(
asList("5.6.13", "6.4.2", "6.5.0"),
getVersionCollection("6.5.0").getUnreleased()
);
assertVersionsEquals(
asList("5.6.13", "6.4.2"),
getVersionCollection("6.4.2").getUnreleased()
);
assertVersionsEquals(
asList("5.6.13", "6.4.2", "6.5.0", "6.6.0"),
getVersionCollection("6.6.0").getUnreleased()
);
assertVersionsEquals(
asList("7.1.1", "7.2.0", "7.3.0", "8.0.0"),
getVersionCollection("8.0.0").getUnreleased()
);
}
public void testGetBranch() {
assertUnreleasedBranchNames(
asList("6.4", "6.x"),
getVersionCollection("7.0.0-alpha1")
);
assertUnreleasedBranchNames(
asList("5.6", "6.4"),
getVersionCollection("6.5.0")
);
assertUnreleasedBranchNames(
singletonList("5.6"),
getVersionCollection("6.4.2")
);
assertUnreleasedBranchNames(
asList("5.6", "6.4", "6.5"),
getVersionCollection("6.6.0")
);
assertUnreleasedBranchNames(
asList("7.1", "7.2", "7.x"),
getVersionCollection("8.0.0")
);
}
public void testGetGradleProjectName() {
assertUnreleasedGradleProjectNames(
asList("bugfix", "minor"),
getVersionCollection("7.0.0-alpha1")
);
assertUnreleasedGradleProjectNames(
asList("maintenance", "bugfix"),
getVersionCollection("6.5.0")
);
assertUnreleasedGradleProjectNames(
singletonList("maintenance"),
getVersionCollection("6.4.2")
);
assertUnreleasedGradleProjectNames(
asList("maintenance", "bugfix", "staged"),
getVersionCollection("6.6.0")
);
assertUnreleasedGradleProjectNames(
asList("bugfix", "staged", "minor"),
getVersionCollection("8.0.0")
);
}
public void testCompareToAuthoritative() {
List<String> listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0");
List<Version> authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1", "7.1.0")
.map(Version::fromString)
.collect(Collectors.toList());
VersionCollection vc = new VersionCollection(
listOfVersions.stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
Version.fromString("8.0.0")
);
vc.compareToAuthoritative(authoritativeReleasedVersions);
}
public void testCompareToAuthoritativeUnreleasedActuallyReleased() {
List<String> listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0");
List<Version> authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1", "7.1.0", "7.1.1", "8.0.0")
.map(Version::fromString)
.collect(Collectors.toList());
VersionCollection vc = new VersionCollection(
listOfVersions.stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
Version.fromString("8.0.0")
);
expectedEx.expect(IllegalStateException.class);
expectedEx.expectMessage("but they are released");
vc.compareToAuthoritative(authoritativeReleasedVersions);
}
public void testCompareToAuthoritativeNotReallyReleased() {
List<String> listOfVersions = asList("7.0.0", "7.0.1", "7.1.0", "7.1.1", "7.2.0", "7.3.0", "8.0.0");
List<Version> authoritativeReleasedVersions = Stream.of("7.0.0", "7.0.1")
.map(Version::fromString)
.collect(Collectors.toList());
VersionCollection vc = new VersionCollection(
listOfVersions.stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
Version.fromString("8.0.0")
);
expectedEx.expect(IllegalStateException.class);
expectedEx.expectMessage("not really released");
vc.compareToAuthoritative(authoritativeReleasedVersions);
}
private void assertUnreleasedGradleProjectNames(List<String> expectedNames, VersionCollection versionCollection) {
List<String> actualNames = new ArrayList<>();
versionCollection.forPreviousUnreleased(unreleasedVersion ->
actualNames.add(unreleasedVersion.gradleProjectName)
);
assertEquals(expectedNames, actualNames);
}
private void assertUnreleasedBranchNames(List<String> expectedBranches, VersionCollection versionCollection) {
List<String> actualBranches = new ArrayList<>();
versionCollection.forPreviousUnreleased(unreleasedVersionInfo ->
actualBranches.add(unreleasedVersionInfo.branch)
);
assertEquals(expectedBranches, actualBranches);
}
private String formatVersionToLine(final String version) {
return " public static final Version V_" + version.replaceAll("\\.", "_") + " ";
}
private void assertVersionsEquals(List<String> expected, List<Version> actual) {
assertEquals(
expected.stream()
.map(Version::fromString)
.collect(Collectors.toList()),
actual
);
}
private VersionCollection getVersionCollection(String currentVersion) {
return new VersionCollection(
sampleVersions.get(currentVersion).stream()
.map(this::formatVersionToLine)
.collect(Collectors.toList()),
Version.fromString(currentVersion)
);
}
}
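For orientation, here is a minimal sketch of driving VersionCollection outside the test harness, assuming only the constructor and accessors exercised above; the input lines mimic the Version.java constants the real build scans, and the class name is made up.

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    // Hypothetical driver; assumes org.elasticsearch.gradle.Version and
    // VersionCollection are on the classpath, as in the tests above.
    public class VersionCollectionSketch {
        public static void main(String[] args) {
            // Lines shaped like Version.java's "public static final Version V_x_y_z" constants.
            List<String> lines = Stream.of("6.4.2", "6.5.0", "7.0.0-alpha1")
                .map(v -> "    public static final Version V_" + v.replaceAll("\\.", "_") + " ")
                .collect(Collectors.toList());
            VersionCollection versions =
                new VersionCollection(lines, Version.fromString("7.0.0-alpha1"));
            System.out.println(versions.getWireCompatible());  // previous minor's releases plus snapshots
            versions.forPreviousUnreleased(info ->             // what the bwc builds must check out
                System.out.println(info.gradleProjectName + " <- branch " + info.branch));
        }
    }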

View File

@ -44,16 +44,12 @@ public class VersionTests extends GradleUnitTestCase {
assertTrue("1.10.20 is not interpreted as before 2.0.0", assertTrue("1.10.20 is not interpreted as before 2.0.0",
Version.fromString("1.10.20").before("2.0.0") Version.fromString("1.10.20").before("2.0.0")
); );
assertTrue("7.0.0-alpha1 is not interpreted as before 7.0.0-alpha2",
Version.fromString("7.0.0-alpha1").before("7.0.0-alpha2")
);
assertTrue("7.0.0-alpha1 should be equal to 7.0.0-alpha1", assertTrue("7.0.0-alpha1 should be equal to 7.0.0-alpha1",
Version.fromString("7.0.0-alpha1").equals(Version.fromString("7.0.0-alpha1")) Version.fromString("7.0.0-alpha1").equals(Version.fromString("7.0.0-alpha1"))
); );
assertTrue("7.0.0-SNAPSHOT should be equal to 7.0.0-SNAPSHOT", assertTrue("7.0.0-SNAPSHOT should be equal to 7.0.0-SNAPSHOT",
Version.fromString("7.0.0-SNAPSHOT").equals(Version.fromString("7.0.0-SNAPSHOT")) Version.fromString("7.0.0-SNAPSHOT").equals(Version.fromString("7.0.0-SNAPSHOT"))
); );
assertEquals(Version.fromString("5.2.1-SNAPSHOT"), Version.fromString("5.2.1-SNAPSHOT"));
} }
public void testCollections() { public void testCollections() {
@ -89,51 +85,10 @@ public class VersionTests extends GradleUnitTestCase {
new Version(7, 0, 0, "", true) new Version(7, 0, 0, "", true)
)); ));
// snapshot is not taken into account TODO inconsistent with equals
assertEquals( assertEquals(
0, 0,
new Version(7, 0, 0, "", false).compareTo( new Version(7, 0, 0, "-alpha1", false).compareTo(
new Version(7, 0, 0, null, true)) new Version(7, 0, 0, "", true))
);
// without suffix is smaller than with TODO
assertOrder(
new Version(7, 0, 0, null, false),
new Version(7, 0, 0, "-alpha1", false)
);
// numbered suffix
assertOrder(
new Version(7, 0, 0, "-alpha1", false),
new Version(7, 0, 0, "-alpha2", false)
);
// ranked suffix
assertOrder(
new Version(7, 0, 0, "-alpha8", false),
new Version(7, 0, 0, "-rc1", false)
);
// ranked suffix
assertOrder(
new Version(7, 0, 0, "-alpha8", false),
new Version(7, 0, 0, "-beta1", false)
);
// ranked suffix
assertOrder(
new Version(7, 0, 0, "-beta8", false),
new Version(7, 0, 0, "-rc1", false)
);
// major takes precedence
assertOrder(
new Version(6, 10, 10, "-alpha8", true),
new Version(7, 0, 0, "-alpha2", false)
);
// then minor
assertOrder(
new Version(7, 0, 10, "-alpha8", true),
new Version(7, 1, 0, "-alpha2", false)
);
// then revision
assertOrder(
new Version(7, 1, 0, "-alpha8", true),
new Version(7, 1, 10, "-alpha2", false)
); );
} }
@ -149,18 +104,6 @@ public class VersionTests extends GradleUnitTestCase {
Version.fromString("foo.bar.baz"); Version.fromString("foo.bar.baz");
} }
public void testExceptionSuffixNumber() {
expectedEx.expect(IllegalArgumentException.class);
expectedEx.expectMessage("Invalid suffix");
new Version(7, 1, 1, "-alpha", true);
}
public void testExceptionSuffix() {
expectedEx.expect(IllegalArgumentException.class);
expectedEx.expectMessage("Suffix must contain one of:");
new Version(7, 1, 1, "foo1", true);
}
private void assertOrder(Version smaller, Version bigger) { private void assertOrder(Version smaller, Version bigger) {
assertEquals(smaller + " should be smaller than " + bigger, -1, smaller.compareTo(bigger)); assertEquals(smaller + " should be smaller than " + bigger, -1, smaller.compareTo(bigger));
} }

View File

@ -522,6 +522,9 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
IndicesAliasesRequest aliasesAddRequest = new IndicesAliasesRequest(); IndicesAliasesRequest aliasesAddRequest = new IndicesAliasesRequest();
AliasActions addAction = new AliasActions(AliasActions.Type.ADD).index(index).aliases(alias); AliasActions addAction = new AliasActions(AliasActions.Type.ADD).index(index).aliases(alias);
if (randomBoolean()) {
addAction.writeIndex(randomBoolean());
}
addAction.routing("routing").searchRouting("search_routing").filter("{\"term\":{\"year\":2016}}"); addAction.routing("routing").searchRouting("search_routing").filter("{\"term\":{\"year\":2016}}");
aliasesAddRequest.addAliasAction(addAction); aliasesAddRequest.addAliasAction(addAction);
AcknowledgedResponse aliasesAddResponse = execute(aliasesAddRequest, highLevelClient().indices()::updateAliases, AcknowledgedResponse aliasesAddResponse = execute(aliasesAddRequest, highLevelClient().indices()::updateAliases,
@ -535,6 +538,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
Map<String, Object> filter = (Map<String, Object>) getAlias.get("filter"); Map<String, Object> filter = (Map<String, Object>) getAlias.get("filter");
Map<String, Object> term = (Map<String, Object>) filter.get("term"); Map<String, Object> term = (Map<String, Object>) filter.get("term");
assertEquals(2016, term.get("year")); assertEquals(2016, term.get("year"));
Boolean isWriteIndex = (Boolean) getAlias.get("is_write_index");
assertThat(isWriteIndex, equalTo(addAction.writeIndex()));
String alias2 = "alias2"; String alias2 = "alias2";
IndicesAliasesRequest aliasesAddRemoveRequest = new IndicesAliasesRequest(); IndicesAliasesRequest aliasesAddRemoveRequest = new IndicesAliasesRequest();
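The randomized writeIndex call above is the only new wiring; spelled out, marking a write index through the high-level client looks roughly like this (a sketch reusing the classes already imported in this test; the index and alias names are made up):

    IndicesAliasesRequest request = new IndicesAliasesRequest();
    request.addAliasAction(
        new AliasActions(AliasActions.Type.ADD)
            .index("logs-000002")          // hypothetical index
            .aliases("logs-write")         // hypothetical alias
            .writeIndex(true));            // writes through the alias go to logs-000002
    AcknowledgedResponse acked =
        highLevelClient().indices().updateAliases(request, RequestOptions.DEFAULT);
    // A subsequent get-aliases call reports "is_write_index": true for this alias.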

View File

@ -17,37 +17,25 @@
* under the License. * under the License.
*/ */
import org.apache.tools.ant.taskdefs.condition.Os import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionCollection
import java.nio.charset.StandardCharsets import java.nio.charset.StandardCharsets
import static org.elasticsearch.gradle.BuildPlugin.getJavaHome import static org.elasticsearch.gradle.BuildPlugin.getJavaHome
/** /**
* This is a dummy project which does a local checkout of the previous * We want to be able to do BWC tests for unreleased versions without relying on and waiting for snapshots.
* wire compat version's branch, and builds a snapshot. This allows backcompat * For this we need to check out and build the unreleased versions.
* tests to test against the next unreleased version, closest to this version, * Since these depend on the current version, we can't name the Gradle projects statically, and don't know what the
* without relying on snapshots. * unreleased versions are when Gradle projects are set up, so we use "build-unreleased-version-*" as placeholders
* and configure them to build various versions here.
*/ */
subprojects { bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unreleasedVersion -> project("${project.path}:${unreleasedVersion.gradleProjectName}") {
Version bwcVersion = unreleasedVersion.version
Version bwcVersion = bwcVersions.getSnapshotForProject(project.name) String bwcBranch = unreleasedVersion.branch
if (bwcVersion == null) {
// this project wont do anything
return
}
String bwcBranch
if (project.name == 'next-minor-snapshot') {
// this is always a .x series
bwcBranch = "${bwcVersion.major}.x"
} else {
bwcBranch = "${bwcVersion.major}.${bwcVersion.minor}"
}
apply plugin: 'distribution' apply plugin: 'distribution'
// Not published so no need to assemble // Not published so no need to assemble
assemble.enabled = false assemble.enabled = false
@ -152,7 +140,7 @@ subprojects {
workingDir = checkoutDir workingDir = checkoutDir
doFirst { doFirst {
// Execution time so that the checkouts are available // Execution time so that the checkouts are available
List<String> lines = file("$checkoutDir/.ci/java-versions.properties").readLines() List<String> lines = file("${checkoutDir}/.ci/java-versions.properties").readLines()
environment( environment(
'JAVA_HOME', 'JAVA_HOME',
getJavaHome(it, Integer.parseInt( getJavaHome(it, Integer.parseInt(
@ -197,15 +185,15 @@ subprojects {
} else if (showStacktraceName.equals("ALWAYS_FULL")) { } else if (showStacktraceName.equals("ALWAYS_FULL")) {
args "--full-stacktrace" args "--full-stacktrace"
} }
standardOutput = new IndentingOutputStream(System.out) standardOutput = new IndentingOutputStream(System.out, bwcVersion)
errorOutput = new IndentingOutputStream(System.err) errorOutput = new IndentingOutputStream(System.err, bwcVersion)
doLast { doLast {
List missing = artifactFiles.grep { file -> List missing = artifactFiles.grep { file ->
false == file.exists() false == file.exists()
} }
if (false == missing.empty) { if (false == missing.empty) {
throw new InvalidUserDataException( throw new InvalidUserDataException(
"Building bwc version didn't generate expected files ${missing}") "Building ${bwcVersion} didn't generate expected files ${missing}")
} }
} }
} }
@ -225,15 +213,16 @@ subprojects {
} }
} }
} }
} }}
class IndentingOutputStream extends OutputStream { class IndentingOutputStream extends OutputStream {
public static final byte[] INDENT = " [bwc] ".getBytes(StandardCharsets.UTF_8) public final byte[] indent
private final OutputStream delegate private final OutputStream delegate
public IndentingOutputStream(OutputStream delegate) { public IndentingOutputStream(OutputStream delegate, Object version) {
this.delegate = delegate this.delegate = delegate
indent = " [${version}] ".getBytes(StandardCharsets.UTF_8)
} }
@Override @Override
@ -245,7 +234,7 @@ class IndentingOutputStream extends OutputStream {
for (int i = 0; i < bytes.length; i++) { for (int i = 0; i < bytes.length; i++) {
delegate.write(bytes[i]) delegate.write(bytes[i])
if (bytes[i] == '\n') { if (bytes[i] == '\n') {
delegate.write(INDENT) delegate.write(indent)
} }
} }
} }
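A small sketch of the stream above in isolation, assuming the two-argument constructor this diff introduces; every line written after a newline picks up the version-tagged prefix.

    // Hypothetical standalone use; the version tag is whatever bwcVersion
    // the configuring closure supplied.
    OutputStream out = new IndentingOutputStream(System.out, "6.4.2-SNAPSHOT");
    out.write("BUILD SUCCESSFUL\nTotal time: 1 min\n".getBytes(StandardCharsets.UTF_8));
    // Output after the first newline is prefixed, e.g. " [6.4.2-SNAPSHOT] Total time: 1 min"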

View File

@ -38,7 +38,7 @@ normalization can be specified with the `name` parameter, which accepts `nfc`,
convert `nfc` to `nfd` or `nfkc` to `nfkd` respectively: convert `nfc` to `nfd` or `nfkc` to `nfkd` respectively:
Which letters are normalized can be controlled by specifying the Which letters are normalized can be controlled by specifying the
`unicodeSetFilter` parameter, which accepts a `unicode_set_filter` parameter, which accepts a
http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet]. http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet].
Here are two examples, the default usage and a customised character filter: Here are two examples, the default usage and a customised character filter:
@ -194,7 +194,7 @@ with the `name` parameter, which accepts `nfc`, `nfkc`, and `nfkc_cf`
(default). (default).
Which letters are normalized can be controlled by specifying the Which letters are normalized can be controlled by specifying the
`unicodeSetFilter` parameter, which accepts a `unicode_set_filter` parameter, which accepts a
http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet]. http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet].
You should probably prefer the <<analysis-icu-normalization-charfilter,Normalization character filter>>. You should probably prefer the <<analysis-icu-normalization-charfilter,Normalization character filter>>.
@ -273,7 +273,7 @@ The ICU folding token filter already does Unicode normalization, so there is
no need to use Normalize character or token filter as well. no need to use Normalize character or token filter as well.
Which letters are folded can be controlled by specifying the Which letters are folded can be controlled by specifying the
`unicodeSetFilter` parameter, which accepts a `unicode_set_filter` parameter, which accepts a
http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet]. http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html[UnicodeSet].
The following example exempts Swedish characters from folding. It is important The following example exempts Swedish characters from folding. It is important
@ -300,7 +300,7 @@ PUT icu_sample
"filter": { "filter": {
"swedish_folding": { "swedish_folding": {
"type": "icu_folding", "type": "icu_folding",
"unicodeSetFilter": "[^åäöÅÄÖ]" "unicode_set_filter": "[^åäöÅÄÖ]"
} }
} }
} }

View File

@ -32,7 +32,7 @@ The GCS repository plugin adds support for using Google Cloud Storage service as
The following plugin has been contributed by our community: The following plugin has been contributed by our community:
* https://github.com/wikimedia/search-repository-swift[Openstack Swift] (by Wikimedia Foundation) * https://github.com/BigDataBoutique/elasticsearch-repository-swift[Openstack Swift] (by Wikimedia Foundation and BigData Boutique)
include::repository-azure.asciidoc[] include::repository-azure.asciidoc[]

View File

@ -277,7 +277,7 @@ The number of requests per second effectively executed during the delete by quer
`throttled_until_millis`:: `throttled_until_millis`::
This field should always be equal to zero in a delete by query response. It only This field should always be equal to zero in a `_delete_by_query` response. It only
has meaning when using the <<docs-delete-by-query-task-api, Task API>>, where it has meaning when using the <<docs-delete-by-query-task-api, Task API>>, where it
indicates the next time (in milliseconds since epoch) a throttled request will be indicates the next time (in milliseconds since epoch) a throttled request will be
executed again in order to conform to `requests_per_second`. executed again in order to conform to `requests_per_second`.

View File

@ -671,7 +671,7 @@ The number of requests per second effectively executed during the reindex.
`throttled_until_millis`:: `throttled_until_millis`::
This field should always be equal to zero in a `_delete_by_query` response. It only This field should always be equal to zero in a `_reindex` response. It only
has meaning when using the <<docs-reindex-task-api, Task API>>, where it has meaning when using the <<docs-reindex-task-api, Task API>>, where it
indicates the next time (in milliseconds since epoch) a throttled request will be indicates the next time (in milliseconds since epoch) a throttled request will be
executed again in order to conform to `requests_per_second`. executed again in order to conform to `requests_per_second`.

View File

@ -237,7 +237,7 @@ batch size is `1000`, so if the `requests_per_second` is set to `500`:
[source,txt] [source,txt]
-------------------------------------------------- --------------------------------------------------
target_time = 1000 / 500 per second = 2 seconds target_time = 1000 / 500 per second = 2 seconds
wait_time = target_time - delete_time = 2 seconds - .5 seconds = 1.5 seconds wait_time = target_time - write_time = 2 seconds - .5 seconds = 1.5 seconds
-------------------------------------------------- --------------------------------------------------
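Read as code, the padding arithmetic above is simply (a back-of-the-envelope sketch; the numbers are the ones from the example):

    float requestsPerSecond = 500f;
    int batchSize = 1000;
    float targetTime = batchSize / requestsPerSecond;  // 1000 / 500 = 2.0 seconds
    float writeTime = 0.5f;                            // time the bulk write actually took
    float waitTime = targetTime - writeTime;           // 2.0 - 0.5 = 1.5 seconds of sleep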
Since the batch is issued as a single `_bulk` request large batch sizes will Since the batch is issued as a single `_bulk` request large batch sizes will
@ -332,7 +332,7 @@ The number of requests per second effectively executed during the update by quer
`throttled_until_millis`:: `throttled_until_millis`::
This field should always be equal to zero in a delete by query response. It only This field should always be equal to zero in an `_update_by_query` response. It only
has meaning when using the <<docs-update-by-query-task-api, Task API>>, where it has meaning when using the <<docs-update-by-query-task-api, Task API>>, where it
indicates the next time (in milliseconds since epoch) a throttled request will be indicates the next time (in milliseconds since epoch) a throttled request will be
executed again in order to conform to `requests_per_second`. executed again in order to conform to `requests_per_second`.

View File

@ -102,3 +102,8 @@ status 200 - OK is now returned instead at all times.
The Put User API response was changed in 6.5.0 to add the `created` field The Put User API response was changed in 6.5.0 to add the `created` field
outside of the user object where it previously had been. In 7.0.0 the user outside of the user object where it previously had been. In 7.0.0 the user
object has been removed in favor of the top level `created` field. object has been removed in favor of the top level `created` field.
[float]
==== Source filtering url parameters `_source_include` and `_source_exclude` have been removed
The `_source_include` and `_source_exclude` url parameters, deprecated in 6.x, have been removed. Use `_source_includes` and `_source_excludes` instead.
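With the low-level REST client, for example, the renamed parameters would be passed like this (a sketch; the index, document id, and field patterns are made up):

    Request request = new Request("GET", "/my-index/_doc/1");
    request.addParameter("_source_includes", "user.*");        // formerly _source_include
    request.addParameter("_source_excludes", "user.location"); // formerly _source_exclude
    Response response = lowLevelClient.performRequest(request);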

View File

@ -5,6 +5,7 @@ The query cache is responsible for caching the results of queries.
There is one query cache per node that is shared by all shards. There is one query cache per node that is shared by all shards.
The cache implements an LRU eviction policy: when a cache becomes full, the The cache implements an LRU eviction policy: when a cache becomes full, the
least recently used data is evicted to make way for new data. least recently used data is evicted to make way for new data.
It is not possible to look at the contents being cached.
The query cache only caches queries which are being used in a filter context. The query cache only caches queries which are being used in a filter context.
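For illustration only, the access-ordered LinkedHashMap idiom is the textbook way to get such an LRU policy; this is not Elasticsearch's actual cache implementation.

    Map<String, Object> lru = new LinkedHashMap<String, Object>(16, 0.75f, true) {
        @Override
        protected boolean removeEldestEntry(Map.Entry<String, Object> eldest) {
            return size() > 1024; // once full, drop the least recently used entry
        }
    };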

View File

@ -31,6 +31,6 @@ public class ExpressionPlugin extends Plugin implements ScriptPlugin {
@Override @Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new ExpressionScriptEngine(settings); return new ExpressionScriptEngine();
} }
} }

View File

@ -28,8 +28,6 @@ import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.elasticsearch.SpecialPermission; import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper;
@ -63,14 +61,10 @@ import java.util.Map;
* *
* Only contexts returning numeric types or {@link Object} are supported. * Only contexts returning numeric types or {@link Object} are supported.
*/ */
public class ExpressionScriptEngine extends AbstractComponent implements ScriptEngine { public class ExpressionScriptEngine implements ScriptEngine {
public static final String NAME = "expression"; public static final String NAME = "expression";
public ExpressionScriptEngine(Settings settings) {
super(settings);
}
@Override @Override
public String getType() { public String getType() {
return NAME; return NAME;

View File

@ -19,7 +19,6 @@
package org.elasticsearch.script.expression; package org.elasticsearch.script.expression;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
@ -64,7 +63,7 @@ public class ExpressionFieldScriptTests extends ESTestCase {
when(fieldData.getFieldName()).thenReturn("field"); when(fieldData.getFieldName()).thenReturn("field");
when(fieldData.load(anyObject())).thenReturn(atomicFieldData); when(fieldData.load(anyObject())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine(Settings.EMPTY); service = new ExpressionScriptEngine();
lookup = new SearchLookup(mapperService, ignored -> fieldData, null); lookup = new SearchLookup(mapperService, ignored -> fieldData, null);
} }

View File

@ -22,7 +22,6 @@ package org.elasticsearch.script.expression;
import java.io.IOException; import java.io.IOException;
import java.text.ParseException; import java.text.ParseException;
import java.util.Collections; import java.util.Collections;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
@ -63,7 +62,7 @@ public class ExpressionNumberSortScriptTests extends ESTestCase {
when(fieldData.getFieldName()).thenReturn("field"); when(fieldData.getFieldName()).thenReturn("field");
when(fieldData.load(anyObject())).thenReturn(atomicFieldData); when(fieldData.load(anyObject())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine(Settings.EMPTY); service = new ExpressionScriptEngine();
lookup = new SearchLookup(mapperService, ignored -> fieldData, null); lookup = new SearchLookup(mapperService, ignored -> fieldData, null);
} }

View File

@ -22,7 +22,6 @@ package org.elasticsearch.script.expression;
import java.io.IOException; import java.io.IOException;
import java.text.ParseException; import java.text.ParseException;
import java.util.Collections; import java.util.Collections;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
@ -63,7 +62,7 @@ public class ExpressionTermsSetQueryTests extends ESTestCase {
when(fieldData.getFieldName()).thenReturn("field"); when(fieldData.getFieldName()).thenReturn("field");
when(fieldData.load(anyObject())).thenReturn(atomicFieldData); when(fieldData.load(anyObject())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine(Settings.EMPTY); service = new ExpressionScriptEngine();
lookup = new SearchLookup(mapperService, ignored -> fieldData, null); lookup = new SearchLookup(mapperService, ignored -> fieldData, null);
} }

View File

@ -20,7 +20,6 @@
package org.elasticsearch.painless; package org.elasticsearch.painless;
import org.elasticsearch.SpecialPermission; import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.painless.Compiler.Loader; import org.elasticsearch.painless.Compiler.Loader;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder; import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
@ -54,7 +53,7 @@ import static org.elasticsearch.painless.node.SSource.MainMethodReserved;
/** /**
* Implementation of a ScriptEngine for the Painless language. * Implementation of a ScriptEngine for the Painless language.
*/ */
public final class PainlessScriptEngine extends AbstractComponent implements ScriptEngine { public final class PainlessScriptEngine implements ScriptEngine {
/** /**
* Standard name of the Painless language. * Standard name of the Painless language.
@ -90,8 +89,6 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
* @param settings The settings to initialize the engine with. * @param settings The settings to initialize the engine with.
*/ */
public PainlessScriptEngine(Settings settings, Map<ScriptContext<?>, List<Whitelist>> contexts) { public PainlessScriptEngine(Settings settings, Map<ScriptContext<?>, List<Whitelist>> contexts) {
super(settings);
defaultCompilerSettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(settings)); defaultCompilerSettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(settings));
Map<ScriptContext<?>, Compiler> contextsToCompilers = new HashMap<>(); Map<ScriptContext<?>, Compiler> contextsToCompilers = new HashMap<>();

View File

@ -31,7 +31,6 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
@ -56,8 +55,8 @@ import java.util.Map;
final class PercolatorHighlightSubFetchPhase implements FetchSubPhase { final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
private final HighlightPhase highlightPhase; private final HighlightPhase highlightPhase;
PercolatorHighlightSubFetchPhase(Settings settings, Map<String, Highlighter> highlighters) { PercolatorHighlightSubFetchPhase(Map<String, Highlighter> highlighters) {
this.highlightPhase = new HighlightPhase(settings, highlighters); this.highlightPhase = new HighlightPhase(highlighters);
} }
boolean hitsExecutionNeeded(SearchContext context) { // for testing boolean hitsExecutionNeeded(SearchContext context) { // for testing

View File

@ -20,7 +20,6 @@
package org.elasticsearch.percolator; package org.elasticsearch.percolator;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
@ -35,13 +34,6 @@ import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap; import static java.util.Collections.singletonMap;
public class PercolatorPlugin extends Plugin implements MapperPlugin, SearchPlugin { public class PercolatorPlugin extends Plugin implements MapperPlugin, SearchPlugin {
private final Settings settings;
public PercolatorPlugin(Settings settings) {
this.settings = settings;
}
@Override @Override
public List<QuerySpec<?>> getQueries() { public List<QuerySpec<?>> getQueries() {
return singletonList(new QuerySpec<>(PercolateQueryBuilder.NAME, PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent)); return singletonList(new QuerySpec<>(PercolateQueryBuilder.NAME, PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent));
@ -51,7 +43,7 @@ public class PercolatorPlugin extends Plugin implements MapperPlugin, SearchPlug
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) { public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return Arrays.asList( return Arrays.asList(
new PercolatorMatchedSlotSubFetchPhase(), new PercolatorMatchedSlotSubFetchPhase(),
new PercolatorHighlightSubFetchPhase(settings, context.getHighlighters()) new PercolatorHighlightSubFetchPhase(context.getHighlighters())
); );
} }

View File

@ -28,7 +28,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -47,8 +46,7 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
public void testHitsExecutionNeeded() { public void testHitsExecutionNeeded() {
PercolateQuery percolateQuery = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), PercolateQuery percolateQuery = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")),
new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery());
PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(Settings.EMPTY, PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap());
emptyMap());
SearchContext searchContext = Mockito.mock(SearchContext.class); SearchContext searchContext = Mockito.mock(SearchContext.class);
Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList())); Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList()));
Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery()); Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());

View File

@ -53,7 +53,7 @@ public class ReindexSourceTargetValidationTests extends ESTestCase {
.put(index("baz"), true) .put(index("baz"), true)
.put(index("source", "source_multi"), true) .put(index("source", "source_multi"), true)
.put(index("source2", "source_multi"), true)).build(); .put(index("source2", "source_multi"), true)).build();
private static final IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver(Settings.EMPTY); private static final IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver();
private static final AutoCreateIndex AUTO_CREATE_INDEX = new AutoCreateIndex(Settings.EMPTY, private static final AutoCreateIndex AUTO_CREATE_INDEX = new AutoCreateIndex(Settings.EMPTY,
new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), INDEX_NAME_EXPRESSION_RESOLVER); new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), INDEX_NAME_EXPRESSION_RESOLVER);

View File

@ -23,7 +23,6 @@ import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.BlobStoreException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.ByteSizeValue;
@ -34,7 +33,7 @@ import java.net.URL;
/** /**
* Read-only URL-based blob store * Read-only URL-based blob store
*/ */
public class URLBlobStore extends AbstractComponent implements BlobStore { public class URLBlobStore implements BlobStore {
private final URL path; private final URL path;
@ -53,7 +52,6 @@ public class URLBlobStore extends AbstractComponent implements BlobStore {
* @param path base URL * @param path base URL
*/ */
public URLBlobStore(Settings settings, URL path) { public URLBlobStore(Settings settings, URL path) {
super(settings);
this.path = path; this.path = path;
this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.uri.buffer_size", this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.uri.buffer_size",
new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes(); new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes();

View File

@ -50,7 +50,7 @@ public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory imp
public IcuFoldingTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { public IcuFoldingTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings); super(indexSettings, name, settings);
this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(ICU_FOLDING_NORMALIZER, settings); this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(indexSettings, ICU_FOLDING_NORMALIZER, settings);
} }
@Override @Override

View File

@ -49,7 +49,7 @@ public class IcuNormalizerCharFilterFactory extends AbstractCharFilterFactory im
} }
Normalizer2 normalizer = Normalizer2.getInstance( Normalizer2 normalizer = Normalizer2.getInstance(
null, method, "compose".equals(mode) ? Normalizer2.Mode.COMPOSE : Normalizer2.Mode.DECOMPOSE); null, method, "compose".equals(mode) ? Normalizer2.Mode.COMPOSE : Normalizer2.Mode.DECOMPOSE);
this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(normalizer, settings); this.normalizer = IcuNormalizerTokenFilterFactory.wrapWithUnicodeSetFilter(indexSettings, normalizer, settings);
} }
@Override @Override

View File

@ -23,7 +23,10 @@ import com.ibm.icu.text.FilteredNormalizer2;
import com.ibm.icu.text.Normalizer2; import com.ibm.icu.text.Normalizer2;
import com.ibm.icu.text.UnicodeSet; import com.ibm.icu.text.UnicodeSet;
import org.apache.logging.log4j.LogManager;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.Version;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
@ -35,14 +38,15 @@ import org.elasticsearch.index.IndexSettings;
* <p>The {@code unicodeSetFilter} attribute can be used to provide the UniCodeSet for filtering.</p> * <p>The {@code unicodeSetFilter} attribute can be used to provide the UniCodeSet for filtering.</p>
*/ */
public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory implements MultiTermAwareComponent { public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory implements MultiTermAwareComponent {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(IcuNormalizerTokenFilterFactory.class));
private final Normalizer2 normalizer; private final Normalizer2 normalizer;
public IcuNormalizerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { public IcuNormalizerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings); super(indexSettings, name, settings);
String method = settings.get("name", "nfkc_cf"); String method = settings.get("name", "nfkc_cf");
Normalizer2 normalizer = Normalizer2.getInstance(null, method, Normalizer2.Mode.COMPOSE); Normalizer2 normalizer = Normalizer2.getInstance(null, method, Normalizer2.Mode.COMPOSE);
this.normalizer = wrapWithUnicodeSetFilter(normalizer, settings); this.normalizer = wrapWithUnicodeSetFilter(indexSettings, normalizer, settings);
} }
@Override @Override
@ -55,8 +59,17 @@ public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory
return this; return this;
} }
static Normalizer2 wrapWithUnicodeSetFilter(final Normalizer2 normalizer, Settings settings) { static Normalizer2 wrapWithUnicodeSetFilter(final IndexSettings indexSettings,
final Normalizer2 normalizer,
final Settings settings) {
String unicodeSetFilter = settings.get("unicodeSetFilter"); String unicodeSetFilter = settings.get("unicodeSetFilter");
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
if (unicodeSetFilter != null) {
deprecationLogger.deprecated("[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]");
} else {
unicodeSetFilter = settings.get("unicode_set_filter");
}
}
if (unicodeSetFilter != null) { if (unicodeSetFilter != null) {
UnicodeSet unicodeSet = new UnicodeSet(unicodeSetFilter); UnicodeSet unicodeSet = new UnicodeSet(unicodeSetFilter);
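Assembled from the hunks above, the new method reads roughly as follows; the tail after the null check is reconstructed from the FilteredNormalizer2/UnicodeSet imports shown earlier, not quoted verbatim from upstream.

    static Normalizer2 wrapWithUnicodeSetFilter(final IndexSettings indexSettings,
                                                final Normalizer2 normalizer,
                                                final Settings settings) {
        String unicodeSetFilter = settings.get("unicodeSetFilter");
        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
            if (unicodeSetFilter != null) {
                // camelCase key still honored on 7.x indices, but now warns
                deprecationLogger.deprecated(
                    "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]");
            } else {
                unicodeSetFilter = settings.get("unicode_set_filter");
            }
        }
        if (unicodeSetFilter != null) {
            UnicodeSet unicodeSet = new UnicodeSet(unicodeSetFilter);
            unicodeSet.freeze();
            return new FilteredNormalizer2(normalizer, unicodeSet);
        }
        return normalizer;
    }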

View File

@ -48,6 +48,61 @@
--- ---
"Normalization with a UnicodeSet Filter": "Normalization with a UnicodeSet Filter":
- do: - do:
indices.create:
index: test
body:
settings:
index:
analysis:
char_filter:
charfilter_icu_normalizer:
type: icu_normalizer
unicode_set_filter: "[^ß]"
filter:
tokenfilter_icu_normalizer:
type: icu_normalizer
unicode_set_filter: "[^ßB]"
tokenfilter_icu_folding:
type: icu_folding
unicode_set_filter: "[^â]"
- do:
indices.analyze:
index: test
body:
char_filter: ["charfilter_icu_normalizer"]
tokenizer: keyword
text: charfilter Föo Bâr Ruß
- length: { tokens: 1 }
- match: { tokens.0.token: charfilter föo bâr ruß }
- do:
indices.analyze:
index: test
body:
tokenizer: keyword
filter: ["tokenfilter_icu_normalizer"]
text: tokenfilter Föo Bâr Ruß
- length: { tokens: 1 }
- match: { tokens.0.token: tokenfilter föo Bâr ruß }
- do:
indices.analyze:
index: test
body:
tokenizer: keyword
filter: ["tokenfilter_icu_folding"]
text: icufolding Föo Bâr Ruß
- length: { tokens: 1 }
- match: { tokens.0.token: icufolding foo bâr russ }
---
"Normalization with a CamcelCase UnicodeSet Filter":
- skip:
version: " - 6.99.99"
reason: unicodeSetFilter deprecated in 7.0.0, replaced by unicode_set_filter
features: "warnings"
- do:
warnings:
- "[unicodeSetFilter] has been deprecated in favor of [unicode_set_filter]"
indices.create: indices.create:
index: test index: test
body: body:

View File

@ -108,7 +108,6 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic
public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureComputeService, public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureComputeService,
TransportService transportService, NetworkService networkService) { TransportService transportService, NetworkService networkService) {
super(settings);
this.settings = settings; this.settings = settings;
this.azureComputeService = azureComputeService; this.azureComputeService = azureComputeService;
this.transportService = transportService; this.transportService = transportService;

View File

@ -33,7 +33,6 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.LazyInitializable; import org.elasticsearch.common.util.LazyInitializable;
import java.util.Random; import java.util.Random;
@ -46,10 +45,6 @@ class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service {
private final AtomicReference<LazyInitializable<AmazonEc2Reference, ElasticsearchException>> lazyClientReference = private final AtomicReference<LazyInitializable<AmazonEc2Reference, ElasticsearchException>> lazyClientReference =
new AtomicReference<>(); new AtomicReference<>();
AwsEc2ServiceImpl(Settings settings) {
super(settings);
}
private AmazonEC2 buildClient(Ec2ClientSettings clientSettings) { private AmazonEC2 buildClient(Ec2ClientSettings clientSettings) {
final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings); final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings);
final ClientConfiguration configuration = buildConfiguration(logger, clientSettings); final ClientConfiguration configuration = buildConfiguration(logger, clientSettings);

View File

@ -69,7 +69,6 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos
private final TransportAddressesCache dynamicHosts; private final TransportAddressesCache dynamicHosts;
AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) { AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) {
super(settings);
this.transportService = transportService; this.transportService = transportService;
this.awsEc2Service = awsEc2Service; this.awsEc2Service = awsEc2Service;

View File

@ -78,7 +78,7 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Reloa
protected final AwsEc2Service ec2Service; protected final AwsEc2Service ec2Service;
public Ec2DiscoveryPlugin(Settings settings) { public Ec2DiscoveryPlugin(Settings settings) {
this(settings, new AwsEc2ServiceImpl(settings)); this(settings, new AwsEc2ServiceImpl());
} }
protected Ec2DiscoveryPlugin(Settings settings, AwsEc2ServiceImpl ec2Service) { protected Ec2DiscoveryPlugin(Settings settings, AwsEc2ServiceImpl ec2Service) {
@ -91,7 +91,7 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Reloa
@Override @Override
public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) {
logger.debug("Register _ec2_, _ec2:xxx_ network names"); logger.debug("Register _ec2_, _ec2:xxx_ network names");
return new Ec2NameResolver(settings); return new Ec2NameResolver();
} }
@Override @Override

View File

@ -23,7 +23,6 @@ import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.network.NetworkService.CustomNameResolver; import org.elasticsearch.common.network.NetworkService.CustomNameResolver;
import org.elasticsearch.common.settings.Settings;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
@ -79,13 +78,6 @@ class Ec2NameResolver extends AbstractComponent implements CustomNameResolver {
} }
} }
/**
* Construct a {@link CustomNameResolver}.
*/
Ec2NameResolver(Settings settings) {
super(settings);
}
/** /**
* @param type the ec2 hostname type to discover. * @param type the ec2 hostname type to discover.
* @return the appropriate host resolved from ec2 meta-data, or null if it cannot be obtained. * @return the appropriate host resolved from ec2 meta-data, or null if it cannot be obtained.

View File

@ -24,8 +24,6 @@ import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.Tag; import com.amazonaws.services.ec2.model.Tag;
import org.elasticsearch.common.settings.Settings;
import java.util.List; import java.util.List;
public class AwsEc2ServiceMock extends AwsEc2ServiceImpl { public class AwsEc2ServiceMock extends AwsEc2ServiceImpl {
@ -33,8 +31,7 @@ public class AwsEc2ServiceMock extends AwsEc2ServiceImpl {
private final int nodes; private final int nodes;
private final List<List<Tag>> tagsList; private final List<List<Tag>> tagsList;
public AwsEc2ServiceMock(Settings settings, int nodes, List<List<Tag>> tagsList) { public AwsEc2ServiceMock(int nodes, List<List<Tag>> tagsList) {
super(settings);
this.nodes = nodes; this.nodes = nodes;
this.tagsList = tagsList; this.tagsList = tagsList;
} }

View File

@ -32,7 +32,7 @@ public class Ec2DiscoveryPluginMock extends Ec2DiscoveryPlugin {
} }
public Ec2DiscoveryPluginMock(Settings settings, int nodes, List<List<Tag>> tagsList) { public Ec2DiscoveryPluginMock(Settings settings, int nodes, List<List<Tag>> tagsList) {
super(settings, new AwsEc2ServiceMock(settings, nodes, tagsList)); super(settings, new AwsEc2ServiceMock(nodes, tagsList));
} }
} }

View File

@ -298,7 +298,7 @@ public class Ec2DiscoveryTests extends ESTestCase {
} }
public void testGetNodeListEmptyCache() throws Exception { public void testGetNodeListEmptyCache() throws Exception {
AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(1, null);
DummyEc2HostProvider provider = new DummyEc2HostProvider(Settings.EMPTY, transportService, awsEc2Service) { DummyEc2HostProvider provider = new DummyEc2HostProvider(Settings.EMPTY, transportService, awsEc2Service) {
@Override @Override
protected List<TransportAddress> fetchDynamicNodes() { protected List<TransportAddress> fetchDynamicNodes() {

View File

@ -32,6 +32,9 @@ import static org.hamcrest.Matchers.containsString;
/** /**
* Test for EC2 network.host settings. * Test for EC2 network.host settings.
* <p>
* Warning: This test doesn't assert that the exceptions are thrown.
* They aren't.
*/ */
public class Ec2NetworkTests extends ESTestCase { public class Ec2NetworkTests extends ESTestCase {
/** /**
@ -42,10 +45,11 @@ public class Ec2NetworkTests extends ESTestCase {
.put("network.host", "_ec2_") .put("network.host", "_ec2_")
.build(); .build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
// TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach.
try { try {
networkService.resolveBindHostAddresses(null); networkService.resolveBindHostAddresses(null);
// note: this can succeed and the test can pass
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("local-ipv4")); assertThat(e.getMessage(), containsString("local-ipv4"));
} }
@ -59,10 +63,11 @@ public class Ec2NetworkTests extends ESTestCase {
.put("network.host", "_ec2:publicIp_") .put("network.host", "_ec2:publicIp_")
.build(); .build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
// TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach.
try { try {
networkService.resolveBindHostAddresses(null); networkService.resolveBindHostAddresses(null);
// note: this can succeed and the test can pass
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("public-ipv4")); assertThat(e.getMessage(), containsString("public-ipv4"));
} }
@ -76,10 +81,11 @@ public class Ec2NetworkTests extends ESTestCase {
.put("network.host", "_ec2:privateIp_") .put("network.host", "_ec2:privateIp_")
.build(); .build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
// TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach.
try { try {
networkService.resolveBindHostAddresses(null); networkService.resolveBindHostAddresses(null);
// note: this can succeed and the test can pass
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("local-ipv4")); assertThat(e.getMessage(), containsString("local-ipv4"));
} }
@ -93,10 +99,11 @@ public class Ec2NetworkTests extends ESTestCase {
.put("network.host", "_ec2:privateIpv4_") .put("network.host", "_ec2:privateIpv4_")
.build(); .build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
// TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach.
try { try {
networkService.resolveBindHostAddresses(null); networkService.resolveBindHostAddresses(null);
// note: this can succeed and the test can pass
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("local-ipv4")); assertThat(e.getMessage(), containsString("local-ipv4"));
} }
@ -110,10 +117,11 @@ public class Ec2NetworkTests extends ESTestCase {
.put("network.host", "_ec2:privateDns_") .put("network.host", "_ec2:privateDns_")
.build(); .build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
// TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach.
try { try {
networkService.resolveBindHostAddresses(null); networkService.resolveBindHostAddresses(null);
// note: this can succeed and the test can pass
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("local-hostname")); assertThat(e.getMessage(), containsString("local-hostname"));
} }
@ -127,10 +135,11 @@ public class Ec2NetworkTests extends ESTestCase {
.put("network.host", "_ec2:publicIpv4_") .put("network.host", "_ec2:publicIpv4_")
.build(); .build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
// TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach.
try { try {
networkService.resolveBindHostAddresses(null); networkService.resolveBindHostAddresses(null);
// note: this can succeed and the test can pass
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("public-ipv4")); assertThat(e.getMessage(), containsString("public-ipv4"));
} }
@ -144,10 +153,11 @@ public class Ec2NetworkTests extends ESTestCase {
.put("network.host", "_ec2:publicDns_") .put("network.host", "_ec2:publicDns_")
.build(); .build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings))); NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
// TODO we need to replace that with a mock. For now we check the URL we are supposed to reach. // TODO we need to replace that with a mock. For now we check the URL we are supposed to reach.
try { try {
networkService.resolveBindHostAddresses(null); networkService.resolveBindHostAddresses(null);
// note: this can succeed and the test can pass
} catch (IOException e) { } catch (IOException e) {
assertThat(e.getMessage(), containsString("public-hostname")); assertThat(e.getMessage(), containsString("public-hostname"));
} }
@ -158,11 +168,7 @@ public class Ec2NetworkTests extends ESTestCase {
* network.host: _local_ * network.host: _local_
*/ */
public void testNetworkHostCoreLocal() throws IOException { public void testNetworkHostCoreLocal() throws IOException {
Settings nodeSettings = Settings.builder() NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver()));
.put("network.host", "_local_")
.build();
NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver(nodeSettings)));
InetAddress[] addresses = networkService.resolveBindHostAddresses(null); InetAddress[] addresses = networkService.resolveBindHostAddresses(null);
assertThat(addresses, arrayContaining(networkService.resolveBindHostAddresses(new String[] { "_local_" }))); assertThat(addresses, arrayContaining(networkService.resolveBindHostAddresses(new String[] { "_local_" })));
} }
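Each of these tests leans on the same idiom: resolution is attempted for real, and only if it fails is the exception message checked for the EC2 metadata path the resolver should have queried (the added note records that a pass-through success is also acceptable). A minimal, self-contained sketch of that idiom follows, using plain JUnit 4 and Hamcrest, with a hypothetical MetadataResolver standing in for Ec2NameResolver:

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.containsString;

    import java.io.IOException;
    import java.net.InetAddress;

    import org.junit.Test;

    public class ResolverFailureMessageTest {

        // Hypothetical stand-in for Ec2NameResolver: with no reachable metadata
        // endpoint it throws, naming the metadata path it tried to fetch.
        static class MetadataResolver {
            InetAddress[] resolve(String hostSetting) throws IOException {
                throw new IOException("failed to resolve [local-ipv4] from the instance metadata endpoint");
            }
        }

        @Test
        public void failureMessageNamesTheMetadataPath() {
            try {
                new MetadataResolver().resolve("_ec2:privateIp_");
                // note: against a live metadata endpoint this can succeed, and the test still passes
            } catch (IOException e) {
                assertThat(e.getMessage(), containsString("local-ipv4"));
            }
        }
    }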

View File

@ -108,7 +108,6 @@ public class GceInstancesServiceImpl extends AbstractComponent implements GceIns
     private final boolean validateCerts;

     public GceInstancesServiceImpl(Settings settings) {
-        super(settings);
         this.settings = settings;
         this.validateCerts = GCE_VALIDATE_CERTIFICATES.get(settings);
         this.project = resolveProject();

View File

@ -22,9 +22,7 @@ package org.elasticsearch.cloud.gce.network;
 import org.elasticsearch.cloud.gce.GceMetadataService;
 import org.elasticsearch.cloud.gce.util.Access;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.network.NetworkService.CustomNameResolver;
-import org.elasticsearch.common.settings.Settings;

 import java.io.IOException;
 import java.net.InetAddress;
@ -39,7 +37,7 @@ import java.net.InetAddress;
  * <li>_gce:hostname_</li>
  * </ul>
  */
-public class GceNameResolver extends AbstractComponent implements CustomNameResolver {
+public class GceNameResolver implements CustomNameResolver {

     private final GceMetadataService gceMetadataService;
@ -73,8 +71,7 @@ public class GceNameResolver extends AbstractComponent implements CustomNameReso
     /**
      * Construct a {@link CustomNameResolver}.
      */
-    public GceNameResolver(Settings settings, GceMetadataService gceMetadataService) {
-        super(settings);
+    public GceNameResolver(GceMetadataService gceMetadataService) {
         this.gceMetadataService = gceMetadataService;
     }
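This hunk shows the commit's recurring shape: a component stops extending AbstractComponent, its constructor loses the Settings parameter and the super(settings) call, and only real collaborators remain. A compact before/after sketch of the pattern, using simplified stand-in types rather than the real Elasticsearch interfaces:

    import java.io.IOException;
    import java.net.InetAddress;

    // Simplified stand-ins, for illustration only; not the real Elasticsearch types.
    interface CustomResolver {
        InetAddress[] resolveIfPossible(String value) throws IOException;
    }

    class MetadataService {
        InetAddress fetch(String path) throws IOException {
            return InetAddress.getLoopbackAddress(); // placeholder for the metadata HTTP call
        }
    }

    // Before: the resolver extended AbstractComponent and took (Settings, MetadataService)
    // only so it could call super(settings). After: it keeps just the collaborator it uses.
    class GceStyleNameResolver implements CustomResolver {

        private final MetadataService metadataService;

        GceStyleNameResolver(MetadataService metadataService) {
            this.metadataService = metadataService;
        }

        @Override
        public InetAddress[] resolveIfPossible(String value) throws IOException {
            return new InetAddress[] { metadataService.fetch(value) };
        }
    }

Call sites shrink accordingly, as the GceDiscoveryPlugin hunk below shows.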

View File

@ -74,7 +74,6 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas
     public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstancesService,
                                    TransportService transportService,
                                    NetworkService networkService) {
-        super(settings);
         this.settings = settings;
         this.gceInstancesService = gceInstancesService;
         this.transportService = transportService;

View File

@ -95,7 +95,7 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
     @Override
     public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) {
         logger.debug("Register _gce_, _gce:xxx network names");
-        return new GceNameResolver(settings, new GceMetadataService(settings));
+        return new GceNameResolver(new GceMetadataService(settings));
     }

     @Override

View File

@ -107,7 +107,7 @@ public class GceNetworkTests extends ESTestCase {
         .build();
     GceMetadataServiceMock mock = new GceMetadataServiceMock(nodeSettings);
-    NetworkService networkService = new NetworkService(Collections.singletonList(new GceNameResolver(nodeSettings, mock)));
+    NetworkService networkService = new NetworkService(Collections.singletonList(new GceNameResolver(mock)));
     try {
         InetAddress[] addresses = networkService.resolveBindHostAddresses(
             NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY));

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.blobstore.BlobMetaData;
 import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.BlobStore;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;

 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URISyntaxException;
@ -47,9 +46,8 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore {
     private final String container;
     private final LocationMode locationMode;

-    public AzureBlobStore(RepositoryMetaData metadata, Settings settings, AzureStorageService service)
+    public AzureBlobStore(RepositoryMetaData metadata, AzureStorageService service)
             throws URISyntaxException, StorageException {
-        super(settings);
         this.container = Repository.CONTAINER_SETTING.get(metadata.settings());
         this.clientName = Repository.CLIENT_NAME.get(metadata.settings());
         this.service = service;

View File

@ -125,7 +125,7 @@ public class AzureRepository extends BlobStoreRepository {
      */
     @Override
     protected AzureBlobStore createBlobStore() throws URISyntaxException, StorageException {
-        final AzureBlobStore blobStore = new AzureBlobStore(metadata, environment.settings(), storageService);
+        final AzureBlobStore blobStore = new AzureBlobStore(metadata, storageService);
         logger.debug((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage(
             "using container [{}], chunk_size [{}], compress [{}], base_path [{}]",

View File

@ -70,7 +70,6 @@ public class AzureStorageService extends AbstractComponent {
     volatile Map<String, AzureStorageSettings> storageSettings = emptyMap();

     public AzureStorageService(Settings settings) {
-        super(settings);
         // eagerly load client settings so that secure settings are read
         final Map<String, AzureStorageSettings> clientsSettings = AzureStorageSettings.load(settings);
         refreshAndClearCache(clientsSettings);

View File

@ -34,7 +34,7 @@ public class AzureBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
     try {
         RepositoryMetaData repositoryMetaData = new RepositoryMetaData("azure", "ittest", Settings.EMPTY);
         AzureStorageServiceMock client = new AzureStorageServiceMock();
-        return new AzureBlobStore(repositoryMetaData, Settings.EMPTY, client);
+        return new AzureBlobStore(repositoryMetaData, client);
     } catch (URISyntaxException | StorageException e) {
         throw new IOException(e);
     }

View File

@ -34,7 +34,7 @@ public class AzureBlobStoreTests extends ESBlobStoreTestCase {
     try {
         RepositoryMetaData repositoryMetaData = new RepositoryMetaData("azure", "ittest", Settings.EMPTY);
         AzureStorageServiceMock client = new AzureStorageServiceMock();
-        return new AzureBlobStore(repositoryMetaData, Settings.EMPTY, client);
+        return new AzureBlobStore(repositoryMetaData, client);
     } catch (URISyntaxException | StorageException e) {
         throw new IOException(e);
     }

View File

@ -37,7 +37,6 @@ import org.elasticsearch.common.blobstore.BlobStoreException;
 import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.internal.io.Streams;

 import java.io.ByteArrayOutputStream;
@ -69,8 +68,7 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore
     private final String clientName;
     private final GoogleCloudStorageService storageService;

-    GoogleCloudStorageBlobStore(Settings settings, String bucketName, String clientName, GoogleCloudStorageService storageService) {
-        super(settings);
+    GoogleCloudStorageBlobStore(String bucketName, String clientName, GoogleCloudStorageService storageService) {
         this.bucketName = bucketName;
         this.clientName = clientName;
         this.storageService = storageService;

View File

@ -38,14 +38,14 @@ public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin
     final GoogleCloudStorageService storageService;

     public GoogleCloudStoragePlugin(final Settings settings) {
-        this.storageService = createStorageService(settings);
+        this.storageService = createStorageService();
         // eagerly load client settings so that secure settings are readable (not closed)
         reload(settings);
     }

     // overridable for tests
-    protected GoogleCloudStorageService createStorageService(Settings settings) {
-        return new GoogleCloudStorageService(settings);
+    protected GoogleCloudStorageService createStorageService() {
+        return new GoogleCloudStorageService();
     }

     @Override
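The plugin builds its service through a protected factory method precisely so tests can substitute a mock, which is what the GCS test hunks further down do. A rough sketch of that seam, under hypothetical names (StorageService and StoragePlugin stand in for GoogleCloudStorageService and GoogleCloudStoragePlugin):

    // Hypothetical stand-ins, for illustration only.
    class StorageService {
        String client(String clientName) {
            return "real client for [" + clientName + "]";
        }
    }

    class StoragePlugin {
        final StorageService storageService;

        StoragePlugin() {
            // the service is obtained through the factory method, not constructed inline
            this.storageService = createStorageService();
        }

        // overridable for tests
        protected StorageService createStorageService() {
            return new StorageService();
        }
    }

    // A test subclass swaps in a mock without touching the production wiring.
    class MockedStoragePlugin extends StoragePlugin {
        @Override
        protected StorageService createStorageService() {
            return new StorageService() {
                @Override
                String client(String clientName) {
                    return "mock client";
                }
            };
        }
    }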

View File

@ -91,7 +91,7 @@ class GoogleCloudStorageRepository extends BlobStoreRepository {
     @Override
     protected GoogleCloudStorageBlobStore createBlobStore() {
-        return new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService);
+        return new GoogleCloudStorageBlobStore(bucket, clientName, storageService);
     }

     @Override

View File

@ -32,7 +32,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.LazyInitializable;
@ -54,10 +53,6 @@ public class GoogleCloudStorageService extends AbstractComponent {
      */
     private final AtomicReference<Map<String, LazyInitializable<Storage, IOException>>> clientsCache = new AtomicReference<>(emptyMap());

-    public GoogleCloudStorageService(final Settings settings) {
-        super(settings);
-    }
-
     /**
      * Refreshes the client settings and clears the client cache. Subsequent calls to
      * {@code GoogleCloudStorageService#client} will return new clients constructed

View File

@ -20,7 +20,6 @@
 package org.elasticsearch.repositories.gcs;

 import org.elasticsearch.common.blobstore.BlobStore;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;

 import java.util.Locale;
@ -42,6 +41,6 @@ public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContai
         } catch (final Exception e) {
             throw new RuntimeException(e);
         }
-        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService);
+        return new GoogleCloudStorageBlobStore(bucketName, clientName, storageService);
     }
 }

View File

@ -79,17 +79,12 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos
         }

         @Override
-        protected GoogleCloudStorageService createStorageService(Settings settings) {
-            return new MockGoogleCloudStorageService(settings);
+        protected GoogleCloudStorageService createStorageService() {
+            return new MockGoogleCloudStorageService();
         }
     }

     public static class MockGoogleCloudStorageService extends GoogleCloudStorageService {

-        MockGoogleCloudStorageService(Settings settings) {
-            super(settings);
-        }
-
         @Override
         public Storage client(String clientName) {
             return new MockStorage(BUCKET, blobs);
View File

@ -20,7 +20,6 @@
 package org.elasticsearch.repositories.gcs;

 import org.elasticsearch.common.blobstore.BlobStore;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.repositories.ESBlobStoreTestCase;

 import java.util.Locale;
@ -42,6 +41,6 @@ public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase {
         } catch (final Exception e) {
             throw new RuntimeException(e);
         }
-        return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService);
+        return new GoogleCloudStorageBlobStore(bucketName, clientName, storageService);
     }
 }

View File

@ -63,7 +63,7 @@ public class GoogleCloudStorageServiceTests extends ESTestCase {
         .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint)
         .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName)
         .build();
-    final GoogleCloudStorageService service = new GoogleCloudStorageService(settings);
+    final GoogleCloudStorageService service = new GoogleCloudStorageService();
     service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(settings));
     final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.client("another_client"));
     assertThat(e.getMessage(), Matchers.startsWith("Unknown client name"));

View File

@ -32,7 +32,6 @@ import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.BlobStore;
 import org.elasticsearch.common.blobstore.BlobStoreException;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;

 import java.io.IOException;
@ -55,9 +54,8 @@ class S3BlobStore extends AbstractComponent implements BlobStore {
     private final StorageClass storageClass;

-    S3BlobStore(Settings settings, S3Service service, String clientName, String bucket, boolean serverSideEncryption,
+    S3BlobStore(S3Service service, String clientName, String bucket, boolean serverSideEncryption,
                 ByteSizeValue bufferSize, String cannedACL, String storageClass) {
-        super(settings);
         this.service = service;
         this.clientName = clientName;
         this.bucket = bucket;
View File

@ -245,7 +245,7 @@ class S3Repository extends BlobStoreRepository {
     protected S3BlobStore createBlobStore() {
         if (reference != null) {
             assert S3ClientSettings.checkDeprecatedCredentials(metadata.settings()) : metadata.name();
-            return new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass) {
+            return new S3BlobStore(service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass) {
                 @Override
                 public AmazonS3Reference clientReference() {
                     if (reference.tryIncRef()) {
@ -256,7 +256,7 @@ class S3Repository extends BlobStoreRepository {
                 }
             };
         } else {
-            return new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);
+            return new S3BlobStore(service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);
         }
     }
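The interesting part of this hunk is what stays the same: when a pre-built, ref-counted client reference exists, createBlobStore returns an anonymous subclass whose clientReference() retains and reuses it instead of building a fresh client. A simplified, self-contained sketch of that pattern (ClientRef, Store, and Repo are illustrative names, not the real classes):

    import java.util.concurrent.atomic.AtomicInteger;

    class ClientRef {
        private final AtomicInteger refs = new AtomicInteger(1);

        boolean tryIncRef() {
            int current;
            do {
                current = refs.get();
                if (current == 0) {
                    return false; // already released, cannot be revived
                }
            } while (!refs.compareAndSet(current, current + 1));
            return true;
        }
    }

    class Store {
        ClientRef clientReference() {
            return new ClientRef(); // default: build a fresh client reference
        }
    }

    class Repo {
        private final ClientRef prebuilt; // may be null when no shared client exists

        Repo(ClientRef prebuilt) {
            this.prebuilt = prebuilt;
        }

        Store createStore() {
            if (prebuilt != null) {
                return new Store() {
                    @Override
                    ClientRef clientReference() {
                        if (prebuilt.tryIncRef()) {
                            return prebuilt; // reuse the shared, ref-counted client
                        }
                        throw new IllegalStateException("client reference already closed");
                    }
                };
            }
            return new Store();
        }
    }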

View File

@ -64,7 +64,7 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo
     protected final S3Service service;

     public S3RepositoryPlugin(final Settings settings) {
-        this(settings, new S3Service(settings));
+        this(settings, new S3Service());
     }

     S3RepositoryPlugin(final Settings settings, final S3Service service) {
View File

@ -33,7 +33,6 @@ import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;

 import java.io.Closeable;
 import java.io.IOException;
@ -47,10 +46,6 @@ class S3Service extends AbstractComponent implements Closeable {
     private volatile Map<String, AmazonS3Reference> clientsCache = emptyMap();
     private volatile Map<String, S3ClientSettings> clientsSettings = emptyMap();

-    S3Service(Settings settings) {
-        super(settings);
-    }
-
     /**
      * Refreshes the settings for the AmazonS3 clients and clears the cache of
      * existing clients. New clients will be build using these new settings. Old

View File

@ -68,11 +68,6 @@ public class RepositoryCredentialsTests extends ESTestCase {
     }

     static final class ProxyS3Service extends S3Service {

-        ProxyS3Service(Settings settings) {
-            super(settings);
-        }
-
         @Override
         AmazonS3 buildClient(final S3ClientSettings clientSettings) {
             final AmazonS3 client = super.buildClient(clientSettings);
@ -82,7 +77,7 @@ public class RepositoryCredentialsTests extends ESTestCase {
         }

         ProxyS3RepositoryPlugin(Settings settings) {
-            super(settings, new ProxyS3Service(settings));
+            super(settings, new ProxyS3Service());
         }

         @Override

View File

@ -118,7 +118,7 @@ public class S3BlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCa
     @Override
     public Map<String, Repository.Factory> getRepositories(final Environment env, final NamedXContentRegistry registry) {
         return Collections.singletonMap(S3Repository.TYPE,
-            (metadata) -> new S3Repository(metadata, env.settings(), registry, new S3Service(env.settings()) {
+            (metadata) -> new S3Repository(metadata, env.settings(), registry, new S3Service() {
                 @Override
                 AmazonS3 buildClient(S3ClientSettings clientSettings) {
                     return new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass);

View File

@ -24,7 +24,6 @@ import com.amazonaws.services.s3.model.CannedAccessControlList;
 import com.amazonaws.services.s3.model.StorageClass;
 import org.elasticsearch.common.blobstore.BlobStore;
 import org.elasticsearch.common.blobstore.BlobStoreException;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.repositories.ESBlobStoreTestCase;
@ -117,13 +116,13 @@ public class S3BlobStoreTests extends ESBlobStoreTestCase {
     final String theClientName = randomAlphaOfLength(4);
     final AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass);

-    final S3Service service = new S3Service(Settings.EMPTY) {
+    final S3Service service = new S3Service() {
         @Override
         public synchronized AmazonS3Reference client(String clientName) {
             assert theClientName.equals(clientName);
             return new AmazonS3Reference(client);
         }
     };
-    return new S3BlobStore(Settings.EMPTY, service, theClientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);
+    return new S3BlobStore(service, theClientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass);
 }

View File

@ -57,10 +57,6 @@ public class S3RepositoryTests extends ESTestCase {
     }

     private static class DummyS3Service extends S3Service {

-        DummyS3Service() {
-            super(Settings.EMPTY);
-        }
-
         @Override
         public AmazonS3Reference client(String clientName) {
             return new AmazonS3Reference(new DummyS3Client());

View File

@ -98,7 +98,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest integr
 // basic integ tests includes testing bwc against the most recent version
 task integTest {
     if (project.bwc_tests_enabled) {
-        for (final def version : bwcVersions.snapshotsIndexCompatible) {
+        for (final def version : bwcVersions.unreleasedIndexCompatible) {
             dependsOn "v${version}#bwcTest"
         }
     }

View File

@ -65,7 +65,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest integr
 // basic integ tests includes testing bwc against the most recent version
 task integTest {
     if (project.bwc_tests_enabled) {
-        for (final def version : bwcVersions.snapshotsWireCompatible) {
+        for (final def version : bwcVersions.unreleasedWireCompatible) {
             dependsOn "v${version}#bwcTest"
         }
     }

View File

@ -145,7 +145,7 @@ test.enabled = false // no unit tests for rolling upgrades, only the rest integr
 // basic integ tests includes testing bwc against the most recent version
 task integTest {
     if (project.bwc_tests_enabled) {
-        for (final def version : bwcVersions.snapshotsWireCompatible) {
+        for (final def version : bwcVersions.unreleasedWireCompatible) {
             dependsOn "v${version}#bwcTest"
         }
     }

View File

@ -298,7 +298,7 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {
                                              ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                              NamedXContentRegistry xContentRegistry, Environment environment,
                                              NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
-        loggingFilter.set(new LoggingFilter(clusterService.getSettings(), threadPool));
+        loggingFilter.set(new LoggingFilter(threadPool));
         return Collections.emptyList();
     }
@ -313,8 +313,7 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {
         private final ThreadPool threadPool;

-        public LoggingFilter(Settings settings, ThreadPool pool) {
-            super(settings);
+        public LoggingFilter(ThreadPool pool) {
             this.threadPool = pool;
         }

View File

@ -57,7 +57,7 @@ test.enabled = false

 task integTest {
     if (project.bwc_tests_enabled) {
-        final def version = bwcVersions.snapshotsIndexCompatible.first()
+        final def version = bwcVersions.unreleasedIndexCompatible.first()
         dependsOn "v${version}#bwcTest"
     }
 }

View File

@ -70,37 +70,3 @@
   - match: { _id: "1" }
   - match: { fields.count: [1] }
   - match: { _source.include.field1: v1 }
-
----
-"Deprecated _source_include and _source_exclude":
-
-  - skip:
-      version: " - 6.5.99"
-      reason: _source_include and _source_exclude are deprecated from 6.6.0
-      features: "warnings"
-
-  - do:
-      indices.create:
-        index: test_1
-        body:
-          mappings:
-            _doc:
-              properties:
-                count:
-                  type: integer
-                  store: true
-
-  - do:
-      index:
-          index: test_1
-          type: _doc
-          id: 1
-          body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
-
-  - do:
-      get: { index: test_1, type: _doc, id: 1, _source_include: include.field1 }
-      warnings:
-          - "Deprecated parameter [_source_include] used, expected [_source_includes] instead"
-
-  - do:
-      get: { index: test_1, type: _doc, id: 1, _source_includes: include, _source_exclude: "*.field2" }
-      warnings:
-          - "Deprecated parameter [_source_exclude] used, expected [_source_excludes] instead"

View File

@ -1,6 +1,6 @@
setup: setup:
- skip: - skip:
version: " - 6.6.0" version: " - 6.5.99"
reason: "added in 6.6.0" reason: "added in 6.6.0"
- do: - do:
indices.create: indices.create:

View File

@ -384,7 +384,7 @@ public class ActionModule extends AbstractModule {
         if (transportClient) {
             restController = null;
         } else {
-            restController = new RestController(settings, headers, restWrapper, nodeClient, circuitBreakerService, usageService);
+            restController = new RestController(headers, restWrapper, nodeClient, circuitBreakerService, usageService);
         }
     }

View File

@ -20,7 +20,6 @@
 package org.elasticsearch.action;

 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.transport.TransportRequestOptions;
 import org.elasticsearch.transport.TransportService;
@ -28,14 +27,13 @@ import org.elasticsearch.transport.TransportService;
 /**
  * A generic proxy that will execute the given action against a specific node.
  */
-public class TransportActionNodeProxy<Request extends ActionRequest, Response extends ActionResponse> extends AbstractComponent {
+public class TransportActionNodeProxy<Request extends ActionRequest, Response extends ActionResponse> {

     private final TransportService transportService;
     private final Action<Response> action;
     private final TransportRequestOptions transportOptions;

     public TransportActionNodeProxy(Settings settings, Action<Response> action, TransportService transportService) {
-        super(settings);
         this.action = action;
         this.transportService = transportService;
         this.transportOptions = action.transportOptions(settings);

View File

@ -486,6 +486,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
             if (false == Strings.isEmpty(searchRouting)) {
                 builder.field(SEARCH_ROUTING.getPreferredName(), searchRouting);
             }
+            if (null != writeIndex) {
+                builder.field(IS_WRITE_INDEX.getPreferredName(), writeIndex);
+            }
             builder.endObject();
             builder.endObject();
             return builder;
@ -505,6 +508,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
                 + ",routing=" + routing
                 + ",indexRouting=" + indexRouting
                 + ",searchRouting=" + searchRouting
+                + ",writeIndex=" + writeIndex
                 + "]";
         }
@ -521,12 +525,13 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
                 && Objects.equals(filter, other.filter)
                 && Objects.equals(routing, other.routing)
                 && Objects.equals(indexRouting, other.indexRouting)
-                && Objects.equals(searchRouting, other.searchRouting);
+                && Objects.equals(searchRouting, other.searchRouting)
+                && Objects.equals(writeIndex, other.writeIndex);
         }

         @Override
         public int hashCode() {
-            return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting);
+            return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting, writeIndex);
         }
     }
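These three hunks extend toXContent, toString, equals, and hashCode with the new writeIndex field in a single change, a discipline worth copying: the moment a field participates in equality it must also participate in hashing and, ideally, in the string form used by test failure messages. A generic sketch of the shape (hypothetical AliasAction class, not the real request):

    import java.util.Objects;

    // Hypothetical value class; the point is that writeIndex appears in all the places at once.
    final class AliasAction {
        private final String alias;
        private final Boolean writeIndex; // nullable, as in the hunk above

        AliasAction(String alias, Boolean writeIndex) {
            this.alias = alias;
            this.writeIndex = writeIndex;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            AliasAction other = (AliasAction) obj;
            return Objects.equals(alias, other.alias)
                && Objects.equals(writeIndex, other.writeIndex);
        }

        @Override
        public int hashCode() {
            return Objects.hash(alias, writeIndex);
        }

        @Override
        public String toString() {
            return "AliasAction[alias=" + alias + ",writeIndex=" + writeIndex + "]";
        }
    }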

View File

@ -82,7 +82,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction<RolloverR
         this.createIndexService = createIndexService;
         this.indexAliasesService = indexAliasesService;
         this.client = client;
-        this.activeShardsObserver = new ActiveShardsObserver(settings, clusterService, threadPool);
+        this.activeShardsObserver = new ActiveShardsObserver(clusterService, threadPool);
     }

     @Override

View File

@ -35,9 +35,7 @@ import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.search.TotalHits.Relation;
 import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
 import org.elasticsearch.common.collect.HppcMaps;
-import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
@ -71,7 +69,7 @@ import java.util.function.IntFunction;
 import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;

-public final class SearchPhaseController extends AbstractComponent {
+public final class SearchPhaseController {

     private static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0];
@ -79,11 +77,9 @@ public final class SearchPhaseController extends AbstractComponent {
     /**
      * Constructor.
-     * @param settings Node settings
      * @param reduceContextFunction A function that builds a context for the reduce of an {@link InternalAggregation}
      */
-    public SearchPhaseController(Settings settings, Function<Boolean, ReduceContext> reduceContextFunction) {
-        super(settings);
+    public SearchPhaseController(Function<Boolean, ReduceContext> reduceContextFunction) {
         this.reduceContextFunction = reduceContextFunction;
     }
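With Settings gone, the controller's only constructor input is the factory function it actually uses, which makes the dependency explicit and trivially mockable. A minimal sketch of that shape, with ReduceContext as a stand-in type (the real one is nested in InternalAggregation):

    import java.util.function.Function;

    // Stand-in types, for illustration only.
    final class ReduceContext {
        final boolean isFinalReduce;

        ReduceContext(boolean isFinalReduce) {
            this.isFinalReduce = isFinalReduce;
        }
    }

    final class PhaseController {
        private final Function<Boolean, ReduceContext> reduceContextFunction;

        PhaseController(Function<Boolean, ReduceContext> reduceContextFunction) {
            this.reduceContextFunction = reduceContextFunction;
        }

        ReduceContext newReduceContext(boolean finalReduce) {
            return reduceContextFunction.apply(finalReduce);
        }
    }

Callers can wire it up with something as small as new PhaseController(ReduceContext::new).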

View File

@ -26,11 +26,9 @@ import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.support.HandledTransportAction.ChannelActionListener;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchService;
@ -66,7 +64,7 @@ import java.util.function.BiFunction;
  * An encapsulation of {@link org.elasticsearch.search.SearchService} operations exposed through
  * transport.
  */
-public class SearchTransportService extends AbstractComponent {
+public class SearchTransportService {

     public static final String FREE_CONTEXT_SCROLL_ACTION_NAME = "indices:data/read/search[free_context/scroll]";
     public static final String FREE_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context]";
@ -84,9 +82,8 @@ public class SearchTransportService extends AbstractComponent {
     private final BiFunction<Transport.Connection, SearchActionListener, ActionListener> responseWrapper;
     private final Map<String, Long> clientConnections = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();

-    public SearchTransportService(Settings settings, TransportService transportService,
+    public SearchTransportService(TransportService transportService,
                                   BiFunction<Transport.Connection, SearchActionListener, ActionListener> responseWrapper) {
-        super(settings);
         this.transportService = transportService;
         this.responseWrapper = responseWrapper;
     }

View File

@ -22,8 +22,6 @@ package org.elasticsearch.action.support;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;

 /**
@ -47,12 +45,7 @@ public interface ActionFilter {
      * filter chain. This base class should serve any action filter implementations that doesn't require
      * to apply async filtering logic.
      */
-    abstract class Simple extends AbstractComponent implements ActionFilter {
-
-        protected Simple(Settings settings) {
-            super(settings);
-        }
+    abstract class Simple implements ActionFilter {

         @Override
         public final <Request extends ActionRequest, Response extends ActionResponse> void apply(Task task, String action, Request request,
                 ActionListener<Response> listener, ActionFilterChain<Request, Response> chain) {

View File

@ -24,7 +24,6 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.node.NodeClosedException;
 import org.elasticsearch.threadpool.ThreadPool;
@ -42,8 +41,7 @@ public class ActiveShardsObserver extends AbstractComponent {
     private final ClusterService clusterService;
     private final ThreadPool threadPool;

-    public ActiveShardsObserver(final Settings settings, final ClusterService clusterService, final ThreadPool threadPool) {
-        super(settings);
+    public ActiveShardsObserver(final ClusterService clusterService, final ThreadPool threadPool) {
         this.clusterService = clusterService;
         this.threadPool = threadPool;
     }

View File

@ -19,7 +19,6 @@
 package org.elasticsearch.action.support;

-import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
@ -28,7 +27,7 @@ import org.elasticsearch.common.settings.Settings;
 /**
  * Helper for dealing with destructive operations and wildcard usage.
  */
-public final class DestructiveOperations extends AbstractComponent {
+public final class DestructiveOperations {

     /**
      * Setting which controls whether wildcard usage (*, prefix*, _all) is allowed.
@ -38,7 +37,6 @@ public final class DestructiveOperations extends AbstractComponent {
     private volatile boolean destructiveRequiresName;

     public DestructiveOperations(Settings settings, ClusterSettings clusterSettings) {
-        super(settings);
         destructiveRequiresName = REQUIRES_NAME_SETTING.get(settings);
         clusterSettings.addSettingsUpdateConsumer(REQUIRES_NAME_SETTING, this::setDestructiveRequiresName);
     }
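Note the dynamic-setting wiring this constructor keeps: the initial value is read once from Settings, and a consumer is registered so later cluster-setting updates flip a volatile flag. A minimal sketch of that mechanism, where ClusterSettingsRegistry is a hypothetical stand-in for ClusterSettings:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    // Hypothetical registry standing in for ClusterSettings.
    class ClusterSettingsRegistry {
        private final List<Consumer<Boolean>> consumers = new ArrayList<>();

        void addSettingsUpdateConsumer(Consumer<Boolean> consumer) {
            consumers.add(consumer);
        }

        void publish(boolean newValue) {
            consumers.forEach(c -> c.accept(newValue));
        }
    }

    class DestructiveOps {
        private volatile boolean destructiveRequiresName;

        DestructiveOps(boolean initialValue, ClusterSettingsRegistry clusterSettings) {
            this.destructiveRequiresName = initialValue;
            // later updates arrive through the consumer, not through re-reading Settings
            clusterSettings.addSettingsUpdateConsumer(this::setDestructiveRequiresName);
        }

        private void setDestructiveRequiresName(boolean value) {
            this.destructiveRequiresName = value;
        }

        boolean requiresExplicitName() {
            return destructiveRequiresName;
        }
    }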

View File

@ -39,7 +39,7 @@ public abstract class TransportAction<Request extends ActionRequest, Response ex
     protected final TaskManager taskManager;

     protected TransportAction(Settings settings, String actionName, ActionFilters actionFilters, TaskManager taskManager) {
-        super(settings);
+        // TODO drop settings from ctor
         this.actionName = actionName;
         this.filters = actionFilters.filters();
         this.taskManager = taskManager;

View File

@ -31,7 +31,6 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
@ -60,8 +59,7 @@ public class UpdateHelper extends AbstractComponent {
     private final ScriptService scriptService;

-    public UpdateHelper(Settings settings, ScriptService scriptService) {
-        super(settings);
+    public UpdateHelper(ScriptService scriptService) {
         this.scriptService = scriptService;
     }

View File

@ -350,7 +350,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     private final ThreadedActionListener.Wrapper threadedWrapper;

     public AbstractClient(Settings settings, ThreadPool threadPool) {
-        super(settings);
         this.settings = settings;
         this.threadPool = threadPool;
         this.admin = new Admin(this);

View File

@ -124,7 +124,6 @@ final class TransportClientNodesService extends AbstractComponent implements Clo
     TransportClientNodesService(Settings settings, TransportService transportService,
                                 ThreadPool threadPool, TransportClient.HostFailureListener hostFailureListener) {
-        super(settings);
         this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
         this.transportService = transportService;
         this.threadPool = threadPool;

View File

@ -108,11 +108,11 @@ public class ClusterModule extends AbstractModule {
     public ClusterModule(Settings settings, ClusterService clusterService, List<ClusterPlugin> clusterPlugins,
                          ClusterInfoService clusterInfoService) {
         this.deciderList = createAllocationDeciders(settings, clusterService.getClusterSettings(), clusterPlugins);
-        this.allocationDeciders = new AllocationDeciders(settings, deciderList);
+        this.allocationDeciders = new AllocationDeciders(deciderList);
         this.shardsAllocator = createShardsAllocator(settings, clusterService.getClusterSettings(), clusterPlugins);
         this.clusterService = clusterService;
-        this.indexNameExpressionResolver = new IndexNameExpressionResolver(settings);
-        this.allocationService = new AllocationService(settings, allocationDeciders, shardsAllocator, clusterInfoService);
+        this.indexNameExpressionResolver = new IndexNameExpressionResolver();
+        this.allocationService = new AllocationService(allocationDeciders, shardsAllocator, clusterInfoService);
     }

     public static List<Entry> getNamedWriteables() {
@ -205,16 +205,16 @@ public class ClusterModule extends AbstractModule {
                                                                      List<ClusterPlugin> clusterPlugins) {
         // collect deciders by class so that we can detect duplicates
         Map<Class, AllocationDecider> deciders = new LinkedHashMap<>();
-        addAllocationDecider(deciders, new MaxRetryAllocationDecider(settings));
-        addAllocationDecider(deciders, new ResizeAllocationDecider(settings));
-        addAllocationDecider(deciders, new ReplicaAfterPrimaryActiveAllocationDecider(settings));
-        addAllocationDecider(deciders, new RebalanceOnlyWhenActiveAllocationDecider(settings));
+        addAllocationDecider(deciders, new MaxRetryAllocationDecider());
+        addAllocationDecider(deciders, new ResizeAllocationDecider());
+        addAllocationDecider(deciders, new ReplicaAfterPrimaryActiveAllocationDecider());
+        addAllocationDecider(deciders, new RebalanceOnlyWhenActiveAllocationDecider());
         addAllocationDecider(deciders, new ClusterRebalanceAllocationDecider(settings, clusterSettings));
         addAllocationDecider(deciders, new ConcurrentRebalanceAllocationDecider(settings, clusterSettings));
         addAllocationDecider(deciders, new EnableAllocationDecider(settings, clusterSettings));
-        addAllocationDecider(deciders, new NodeVersionAllocationDecider(settings));
-        addAllocationDecider(deciders, new SnapshotInProgressAllocationDecider(settings));
-        addAllocationDecider(deciders, new RestoreInProgressAllocationDecider(settings));
+        addAllocationDecider(deciders, new NodeVersionAllocationDecider());
+        addAllocationDecider(deciders, new SnapshotInProgressAllocationDecider());
+        addAllocationDecider(deciders, new RestoreInProgressAllocationDecider());
         addAllocationDecider(deciders, new FilterAllocationDecider(settings, clusterSettings));
         addAllocationDecider(deciders, new SameShardAllocationDecider(settings, clusterSettings));
         addAllocationDecider(deciders, new DiskThresholdDecider(settings, clusterSettings));
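The comment in this hunk, "collect deciders by class so that we can detect duplicates", names a small but useful trick: keying registrations by concrete class in a LinkedHashMap both rejects double registration and keeps iteration order deterministic. A sketch of that idea in isolation (DeciderRegistry is a hypothetical name, not the real helper):

    import java.util.Collection;
    import java.util.LinkedHashMap;
    import java.util.Map;

    class DeciderRegistry {
        // LinkedHashMap: insertion order is preserved, so evaluation order stays deterministic.
        private final Map<Class<?>, Object> deciders = new LinkedHashMap<>();

        void add(Object decider) {
            // put() returns the previous mapping, so a non-null result means a duplicate type
            if (deciders.put(decider.getClass(), decider) != null) {
                throw new IllegalArgumentException(
                    "Cannot specify allocation decider [" + decider.getClass().getName() + "] twice");
            }
        }

        Collection<Object> all() {
            return deciders.values();
        }
    }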

View File

@ -19,19 +19,12 @@
 package org.elasticsearch.cluster;

-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
-
 /**
  * ClusterInfoService that provides empty maps for disk usage and shard sizes
  */
-public class EmptyClusterInfoService extends AbstractComponent implements ClusterInfoService {
+public class EmptyClusterInfoService implements ClusterInfoService {
     public static final EmptyClusterInfoService INSTANCE = new EmptyClusterInfoService();

-    private EmptyClusterInfoService() {
-        super(Settings.EMPTY);
-    }
-
     @Override
     public ClusterInfo getClusterInfo() {
         return ClusterInfo.EMPTY;

View File

@ -88,7 +88,6 @@ public class InternalClusterInfoService extends AbstractComponent
     public InternalClusterInfoService(Settings settings, ClusterService clusterService, ThreadPool threadPool, NodeClient client,
                                       Consumer<ClusterInfo> listener) {
-        super(settings);
         this.leastAvailableSpaceUsages = ImmutableOpenMap.of();
         this.mostAvailableSpaceUsages = ImmutableOpenMap.of();
         this.shardRoutingToDataPath = ImmutableOpenMap.of();

View File

@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.IndicesAdminClient;
-import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
@ -39,7 +38,7 @@ import org.elasticsearch.index.mapper.Mapping;
  * Called by shards in the cluster when their mapping was dynamically updated and it needs to be updated
  * in the cluster state meta data (and broadcast to all members).
  */
-public class MappingUpdatedAction extends AbstractComponent {
+public class MappingUpdatedAction {

     public static final Setting<TimeValue> INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING =
         Setting.positiveTimeSetting("indices.mapping.dynamic_timeout", TimeValue.timeValueSeconds(30),
@ -50,7 +49,6 @@ public class MappingUpdatedAction extends AbstractComponent {
     @Inject
     public MappingUpdatedAction(Settings settings, ClusterSettings clusterSettings) {
-        super(settings);
         this.dynamicMappingUpdateTimeout = INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING.get(settings);
         clusterSettings.addSettingsUpdateConsumer(INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, this::setDynamicMappingUpdateTimeout);
     }

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.EmptyTransportResponseHandler;
@ -48,8 +47,7 @@ public class NodeMappingRefreshAction extends AbstractComponent {
     private final MetaDataMappingService metaDataMappingService;

     @Inject
-    public NodeMappingRefreshAction(Settings settings, TransportService transportService, MetaDataMappingService metaDataMappingService) {
-        super(settings);
+    public NodeMappingRefreshAction(TransportService transportService, MetaDataMappingService metaDataMappingService) {
         this.transportService = transportService;
         this.metaDataMappingService = metaDataMappingService;
         transportService.registerRequestHandler(ACTION_NAME,

@@ -46,7 +46,6 @@ import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.discovery.Discovery;
@@ -89,9 +88,8 @@ public class ShardStateAction extends AbstractComponent {
     private final ConcurrentMap<FailedShardEntry, CompositeListener> remoteFailedShardsCache = ConcurrentCollections.newConcurrentMap();
     @Inject
-    public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService,
+    public ShardStateAction(ClusterService clusterService, TransportService transportService,
                             AllocationService allocationService, RoutingService routingService, ThreadPool threadPool) {
-        super(settings);
         this.transportService = transportService;
         this.clusterService = clusterService;
         this.threadPool = threadPool;

@@ -22,8 +22,6 @@ package org.elasticsearch.cluster.metadata;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -43,12 +41,7 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQuery
  * Validator for an alias, to be used before adding an alias to the index metadata
  * and make sure the alias is valid
  */
-public class AliasValidator extends AbstractComponent {
-    public AliasValidator(Settings settings) {
-        super(settings);
-    }
+public class AliasValidator {
     /**
      * Allows to validate an {@link org.elasticsearch.action.admin.indices.alias.Alias} and make sure
      * it's valid before it gets added to the index metadata. Doesn't validate the alias filter.
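With its Settings-only constructor removed, AliasValidator is stateless and gains an implicit no-arg constructor. A hedged usage sketch (the call site is illustrative, not taken from this diff):

import org.elasticsearch.cluster.metadata.AliasValidator;

class AliasValidatorUsage {
    // Callers that previously did new AliasValidator(settings) now construct it directly.
    static final AliasValidator VALIDATOR = new AliasValidator();
}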

Some files were not shown because too many files have changed in this diff.