Removes version 2.x constants from Version (#24011)

* Removes version 2.x constants from Version

Closes #21887

* Addresses review comments
Colin Goodheart-Smithe 2017-04-11 08:31:22 +01:00 committed by GitHub
parent f22e0dc30b
commit 0114f0061c
44 changed files with 160 additions and 689 deletions

View File

@@ -35,48 +35,6 @@ public class Version implements Comparable<Version> {
* values below 25 are for alpha builder (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99
* indicating a release the (internal) format of the id is there so we can easily do after/before checks on the id
*/
public static final int V_2_0_0_ID = 2000099;
public static final Version V_2_0_0 = new Version(V_2_0_0_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_0_1_ID = 2000199;
public static final Version V_2_0_1 = new Version(V_2_0_1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_0_2_ID = 2000299;
public static final Version V_2_0_2 = new Version(V_2_0_2_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_1_0_ID = 2010099;
public static final Version V_2_1_0 = new Version(V_2_1_0_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
public static final int V_2_1_1_ID = 2010199;
public static final Version V_2_1_1 = new Version(V_2_1_1_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
public static final int V_2_1_2_ID = 2010299;
public static final Version V_2_1_2 = new Version(V_2_1_2_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
public static final int V_2_2_0_ID = 2020099;
public static final Version V_2_2_0 = new Version(V_2_2_0_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
public static final int V_2_2_1_ID = 2020199;
public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
public static final int V_2_2_2_ID = 2020299;
public static final Version V_2_2_2 = new Version(V_2_2_2_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
public static final int V_2_3_0_ID = 2030099;
public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_3_1_ID = 2030199;
public static final Version V_2_3_1 = new Version(V_2_3_1_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_3_2_ID = 2030299;
public static final Version V_2_3_2 = new Version(V_2_3_2_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_3_3_ID = 2030399;
public static final Version V_2_3_3 = new Version(V_2_3_3_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_3_4_ID = 2030499;
public static final Version V_2_3_4 = new Version(V_2_3_4_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_3_5_ID = 2030599;
public static final Version V_2_3_5 = new Version(V_2_3_5_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_4_0_ID = 2040099;
public static final Version V_2_4_0 = new Version(V_2_4_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_2);
public static final int V_2_4_1_ID = 2040199;
public static final Version V_2_4_1 = new Version(V_2_4_1_ID, org.apache.lucene.util.Version.LUCENE_5_5_2);
public static final int V_2_4_2_ID = 2040299;
public static final Version V_2_4_2 = new Version(V_2_4_2_ID, org.apache.lucene.util.Version.LUCENE_5_5_2);
public static final int V_2_4_3_ID = 2040399;
public static final Version V_2_4_3 = new Version(V_2_4_3_ID, org.apache.lucene.util.Version.LUCENE_5_5_2);
public static final int V_2_4_4_ID = 2040499;
public static final Version V_2_4_4 = new Version(V_2_4_4_ID, org.apache.lucene.util.Version.LUCENE_5_5_2);
public static final int V_2_4_5_ID = 2040599;
public static final Version V_2_4_5 = new Version(V_2_4_5_ID, org.apache.lucene.util.Version.LUCENE_5_5_2);
public static final int V_5_0_0_alpha1_ID = 5000001;
public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
public static final int V_5_0_0_alpha2_ID = 5000002;
@@ -182,48 +140,6 @@ public class Version implements Comparable<Version> {
return V_5_0_0_alpha2;
case V_5_0_0_alpha1_ID:
return V_5_0_0_alpha1;
case V_2_4_5_ID:
return V_2_4_5;
case V_2_4_4_ID:
return V_2_4_4;
case V_2_4_3_ID:
return V_2_4_3;
case V_2_4_2_ID:
return V_2_4_2;
case V_2_4_1_ID:
return V_2_4_1;
case V_2_4_0_ID:
return V_2_4_0;
case V_2_3_5_ID:
return V_2_3_5;
case V_2_3_4_ID:
return V_2_3_4;
case V_2_3_3_ID:
return V_2_3_3;
case V_2_3_2_ID:
return V_2_3_2;
case V_2_3_1_ID:
return V_2_3_1;
case V_2_3_0_ID:
return V_2_3_0;
case V_2_2_2_ID:
return V_2_2_2;
case V_2_2_1_ID:
return V_2_2_1;
case V_2_2_0_ID:
return V_2_2_0;
case V_2_1_2_ID:
return V_2_1_2;
case V_2_1_1_ID:
return V_2_1_1;
case V_2_1_0_ID:
return V_2_1_0;
case V_2_0_2_ID:
return V_2_0_2;
case V_2_0_1_ID:
return V_2_0_1;
case V_2_0_0_ID:
return V_2_0_0;
default:
return new Version(id, org.apache.lucene.util.Version.LATEST);
}
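The removed constants all follow the internal id layout that the comment at the top of this hunk describes and that can be read off the constants themselves (for example V_2_3_4_ID = 2030499). A small standalone sketch of that decoding, using the now-removed V_2_0_0_ID value purely as an illustration:

    // Illustrative only: decodes the internal id layout described in the Version comment above.
    public final class VersionIdDecodeExample {
        public static void main(String[] args) {
            int id = 2000099; // was V_2_0_0_ID before this commit
            int major = (id / 1000000) % 100;
            int minor = (id / 10000) % 100;
            int revision = (id / 100) % 100;
            int build = id % 100; // 99 marks a release; lower values mark alpha/beta/RC builds per the comment
            System.out.printf("%d.%d.%d (build marker %d)%n", major, minor, revision, build); // 2.0.0 (build marker 99)
        }
    }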

View File

@@ -126,9 +126,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
 }
 }
 type = in.readOptionalString();
-if (in.getVersion().onOrAfter(Version.V_2_2_0)) {
-    attributes = (Map<String, Object>) in.readGenericValue();
-}
+attributes = (Map<String, Object>) in.readGenericValue();
 }
 @Override
@@ -141,9 +139,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
 out.writeOptionalVInt(positionLength > 1 ? positionLength : null);
 }
 out.writeOptionalString(type);
-if (out.getVersion().onOrAfter(Version.V_2_2_0)) {
-    out.writeGenericValue(attributes);
-}
+out.writeGenericValue(attributes);
 }
 }
@@ -200,9 +196,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
 for (int i = 0; i < size; i++) {
 tokens.add(AnalyzeToken.readAnalyzeToken(in));
 }
-if (in.getVersion().onOrAfter(Version.V_2_2_0)) {
-    detail = in.readOptionalStreamable(DetailAnalyzeResponse::new);
-}
+detail = in.readOptionalStreamable(DetailAnalyzeResponse::new);
 }
 @Override
@@ -216,9 +210,7 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
 } else {
 out.writeVInt(0);
 }
-if (out.getVersion().onOrAfter(Version.V_2_2_0)) {
-    out.writeOptionalStreamable(detail);
-}
+out.writeOptionalStreamable(detail);
 }
 static final class Fields {
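The pattern deleted here, and again in the two field-stats files below, is the usual stream-version guard: 2.x peers are no longer wire-compatible with this branch (see the VersionTests changes further down), so the guarded branch is always taken and the check is dropped. A self-contained sketch of the idiom, reduced to plain java.io types with illustrative field names:

    import java.io.*;

    // Sketch of the wire-compatibility guard idiom removed throughout this commit.
    // Peer versions are plain ints here; the real code uses org.elasticsearch.Version.
    public final class VersionGuardExample {
        static final int V_2_2_0_ID = 2020099; // the kind of constant this commit deletes

        static void write(DataOutputStream out, int peerVersion, String type, String attributes) throws IOException {
            out.writeUTF(type);
            if (peerVersion >= V_2_2_0_ID) { // old code: only send fields the peer understands
                out.writeUTF(attributes);
            }
        }

        static void read(DataInputStream in, int peerVersion) throws IOException {
            String type = in.readUTF();
            String attributes = peerVersion >= V_2_2_0_ID ? in.readUTF() : null; // mirrored on the read side
            System.out.println(type + " / " + attributes);
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            write(new DataOutputStream(buf), 5000099, "token", "{\"keyword\":true}");
            read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())), 5000099);
        }
    }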

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.action.fieldstats;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ValidateActions;
 import org.elasticsearch.action.support.broadcast.BroadcastRequest;
@@ -200,9 +199,7 @@ public class FieldStatsRequest extends BroadcastRequest<FieldStatsRequest> {
 out.writeByte(indexConstraint.getProperty().getId());
 out.writeByte(indexConstraint.getComparison().getId());
 out.writeString(indexConstraint.getValue());
-if (out.getVersion().onOrAfter(Version.V_2_0_1)) {
-    out.writeOptionalString(indexConstraint.getOptionalFormat());
-}
+out.writeOptionalString(indexConstraint.getOptionalFormat());
 }
 out.writeString(level);
 out.writeBoolean(useCache);

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.action.fieldstats;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import java.io.IOException;
@@ -39,11 +38,7 @@ public class IndexConstraint {
 this.property = Property.read(input.readByte());
 this.comparison = Comparison.read(input.readByte());
 this.value = input.readString();
-if (input.getVersion().onOrAfter(Version.V_2_0_1)) {
-    this.optionalFormat = input.readOptionalString();
-} else {
-    this.optionalFormat = null;
-}
+this.optionalFormat = input.readOptionalString();
 }
 public IndexConstraint(String field, Property property, Comparison comparison, String value) {

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.index.analysis;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.TextFieldMapper;
@@ -78,19 +77,6 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
 int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
-if (analyzerSettings.getAsMap().containsKey("position_offset_gap")){
-    if (indexSettings.getIndexVersionCreated().before(Version.V_2_0_0)){
-        if (analyzerSettings.getAsMap().containsKey("position_increment_gap")){
-            throw new IllegalArgumentException("Custom Analyzer [" + name() +
-                "] defined both [position_offset_gap] and [position_increment_gap], use only [position_increment_gap]");
-        }
-        positionIncrementGap = analyzerSettings.getAsInt("position_offset_gap", positionIncrementGap);
-    } else {
-        throw new IllegalArgumentException("Option [position_offset_gap] in Custom Analyzer [" + name() +
-            "] has been renamed, please use [position_increment_gap] instead.");
-    }
-}
 positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap);
 int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);;

View File

@@ -26,7 +26,6 @@ import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Numbers;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
@@ -286,9 +285,8 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
 return ignoreUnmapped;
 }
-QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) {
-    // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
-    if (GeoValidationMethod.isIgnoreMalformed(validationMethod) || indexCreatedBeforeV2_0) {
+QueryValidationException checkLatLon() {
+    if (GeoValidationMethod.isIgnoreMalformed(validationMethod)) {
 return null;
 }
@@ -327,15 +325,14 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
 throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
 }
-QueryValidationException exception = checkLatLon(context.indexVersionCreated().before(Version.V_2_0_0));
+QueryValidationException exception = checkLatLon();
 if (exception != null) {
 throw new QueryShardException(context, "couldn't validate latitude/ longitude values", exception);
 }
 GeoPoint luceneTopLeft = new GeoPoint(topLeft);
 GeoPoint luceneBottomRight = new GeoPoint(bottomRight);
-final Version indexVersionCreated = context.indexVersionCreated();
-if (indexVersionCreated.onOrAfter(Version.V_2_2_0) || GeoValidationMethod.isCoerce(validationMethod)) {
+if (GeoValidationMethod.isCoerce(validationMethod)) {
 // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for
 // the complete longitude range so need to set longitude to the complete longitude range
 double right = luceneBottomRight.getLon();
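The "complete longitude range" special case mentioned in the comment above is easiest to see with numbers. A standalone sketch of the idea (an illustrative helper, not the builder's actual code): when coercing, a box whose right edge sits exactly 360 degrees past its left edge means the whole longitude range and is kept as [-180, 180] instead of being wrapped into a zero-width box.

    // Illustrative only: the idea behind the "complete longitude range" comment above.
    public final class LongitudeRangeExample {
        public static void main(String[] args) {
            double left = -200.0;  // top-left longitude as supplied
            double right = 160.0;  // bottom-right longitude as supplied

            boolean completeLonRange = (right - left) == 360.0 && right > left;
            if (completeLonRange) {
                left = -180.0;     // whole-world box: keep the full longitude range
                right = 180.0;
            } else {
                left = wrapLon(left);
                right = wrapLon(right);
            }
            System.out.println("left=" + left + ", right=" + right);
        }

        static double wrapLon(double lon) {
            return ((lon + 180.0) % 360.0 + 360.0) % 360.0 - 180.0; // wrap into [-180, 180)
        }
    }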

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.document.LatLonPoint;
 import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
@@ -241,13 +240,12 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
 throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field");
 }
-final Version indexVersionCreated = shardContext.indexVersionCreated();
-QueryValidationException exception = checkLatLon(shardContext.indexVersionCreated().before(Version.V_2_0_0));
+QueryValidationException exception = checkLatLon();
 if (exception != null) {
 throw new QueryShardException(shardContext, "couldn't validate latitude/ longitude values", exception);
 }
-if (indexVersionCreated.onOrAfter(Version.V_2_2_0) || GeoValidationMethod.isCoerce(validationMethod)) {
+if (GeoValidationMethod.isCoerce(validationMethod)) {
 GeoUtils.normalizePoint(center, true, true);
 }
@@ -389,9 +387,8 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
 Objects.equals(ignoreUnmapped, other.ignoreUnmapped);
 }
-private QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) {
-    // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
-    if (GeoValidationMethod.isIgnoreMalformed(validationMethod) || indexCreatedBeforeV2_0) {
+private QueryValidationException checkLatLon() {
+    if (GeoValidationMethod.isIgnoreMalformed(validationMethod)) {
 return null;
 }

View File

@@ -27,7 +27,6 @@ import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.apache.lucene.search.SortField;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoDistance;
@@ -491,12 +490,11 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
 @Override
 public SortFieldAndFormat build(QueryShardContext context) throws IOException {
-final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
 // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed
 // on 2.x created indexes
 GeoPoint[] localPoints = points.toArray(new GeoPoint[points.size()]);
-if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validation)) {
+if (GeoValidationMethod.isIgnoreMalformed(validation) == false) {
 for (GeoPoint point : localPoints) {
 if (GeoUtils.isValidLatitude(point.lat()) == false) {
 throw new ElasticsearchParseException(

View File

@@ -33,44 +33,43 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
-import static org.elasticsearch.Version.V_2_2_0;
-import static org.elasticsearch.Version.V_5_0_0_alpha1;
+import static org.elasticsearch.Version.V_5_3_0_UNRELEASED;
+import static org.elasticsearch.Version.V_6_0_0_alpha1_UNRELEASED;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.sameInstance;
 public class VersionTests extends ESTestCase {
 public void testVersionComparison() throws Exception {
-assertThat(V_2_2_0.before(V_5_0_0_alpha1), is(true));
-assertThat(V_2_2_0.before(V_2_2_0), is(false));
-assertThat(V_5_0_0_alpha1.before(V_2_2_0), is(false));
-assertThat(V_2_2_0.onOrBefore(V_5_0_0_alpha1), is(true));
-assertThat(V_2_2_0.onOrBefore(V_2_2_0), is(true));
-assertThat(V_5_0_0_alpha1.onOrBefore(V_2_2_0), is(false));
-assertThat(V_2_2_0.after(V_5_0_0_alpha1), is(false));
-assertThat(V_2_2_0.after(V_2_2_0), is(false));
-assertThat(V_5_0_0_alpha1.after(V_2_2_0), is(true));
-assertThat(V_2_2_0.onOrAfter(V_5_0_0_alpha1), is(false));
-assertThat(V_2_2_0.onOrAfter(V_2_2_0), is(true));
-assertThat(V_5_0_0_alpha1.onOrAfter(V_2_2_0), is(true));
+assertThat(V_5_3_0_UNRELEASED.before(V_6_0_0_alpha1_UNRELEASED), is(true));
+assertThat(V_5_3_0_UNRELEASED.before(V_5_3_0_UNRELEASED), is(false));
+assertThat(V_6_0_0_alpha1_UNRELEASED.before(V_5_3_0_UNRELEASED), is(false));
+assertThat(V_5_3_0_UNRELEASED.onOrBefore(V_6_0_0_alpha1_UNRELEASED), is(true));
+assertThat(V_5_3_0_UNRELEASED.onOrBefore(V_5_3_0_UNRELEASED), is(true));
+assertThat(V_6_0_0_alpha1_UNRELEASED.onOrBefore(V_5_3_0_UNRELEASED), is(false));
+assertThat(V_5_3_0_UNRELEASED.after(V_6_0_0_alpha1_UNRELEASED), is(false));
+assertThat(V_5_3_0_UNRELEASED.after(V_5_3_0_UNRELEASED), is(false));
+assertThat(V_6_0_0_alpha1_UNRELEASED.after(V_5_3_0_UNRELEASED), is(true));
+assertThat(V_5_3_0_UNRELEASED.onOrAfter(V_6_0_0_alpha1_UNRELEASED), is(false));
+assertThat(V_5_3_0_UNRELEASED.onOrAfter(V_5_3_0_UNRELEASED), is(true));
+assertThat(V_6_0_0_alpha1_UNRELEASED.onOrAfter(V_5_3_0_UNRELEASED), is(true));
 assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
 assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));
 assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24")));
 assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0")));
-assertThat(V_2_2_0, is(lessThan(V_5_0_0_alpha1)));
-assertThat(V_2_2_0.compareTo(V_2_2_0), is(0));
-assertThat(V_5_0_0_alpha1, is(greaterThan(V_2_2_0)));
+assertThat(V_5_3_0_UNRELEASED, is(lessThan(V_6_0_0_alpha1_UNRELEASED)));
+assertThat(V_5_3_0_UNRELEASED.compareTo(V_5_3_0_UNRELEASED), is(0));
+assertThat(V_6_0_0_alpha1_UNRELEASED, is(greaterThan(V_5_3_0_UNRELEASED)));
 }
 public void testMin() {
@@ -99,9 +98,11 @@ public class VersionTests extends ESTestCase {
 public void testMinimumIndexCompatibilityVersion() {
 assertEquals(Version.V_5_0_0, Version.V_6_0_0_alpha1_UNRELEASED.minimumIndexCompatibilityVersion());
-assertEquals(Version.V_2_0_0, Version.V_5_0_0.minimumIndexCompatibilityVersion());
-assertEquals(Version.V_2_0_0, Version.V_5_1_1_UNRELEASED.minimumIndexCompatibilityVersion());
-assertEquals(Version.V_2_0_0, Version.V_5_0_0_alpha1.minimumIndexCompatibilityVersion());
+assertEquals(Version.fromId(2000099), Version.V_5_0_0.minimumIndexCompatibilityVersion());
+assertEquals(Version.fromId(2000099),
+    Version.V_5_1_1_UNRELEASED.minimumIndexCompatibilityVersion());
+assertEquals(Version.fromId(2000099),
+    Version.V_5_0_0_alpha1.minimumIndexCompatibilityVersion());
 }
 public void testVersionConstantPresent() {
@@ -155,7 +156,8 @@ public class VersionTests extends ESTestCase {
 public void testIndexCreatedVersion() {
 // an actual index has a IndexMetaData.SETTING_INDEX_UUID
-final Version version = randomFrom(Version.V_2_0_0, Version.V_2_3_0, Version.V_5_0_0_alpha1);
+final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_2,
+    Version.V_5_2_0_UNRELEASED, Version.V_6_0_0_alpha1_UNRELEASED);
 assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
 }
@@ -230,7 +232,7 @@ public class VersionTests extends ESTestCase {
 });
 assertSame(Version.CURRENT, Version.fromString(Version.CURRENT.toString()));
-assertSame(Version.fromString("2.0.0-SNAPSHOT"), Version.fromString("2.0.0"));
+assertEquals(Version.fromString("2.0.0-SNAPSHOT"), Version.fromId(2000099));
 expectThrows(IllegalArgumentException.class, () -> {
 Version.fromString("5.0.0-SNAPSHOT");
@@ -325,8 +327,8 @@ public class VersionTests extends ESTestCase {
 public void testIsCompatible() {
 assertTrue(isCompatible(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion()));
 assertTrue(isCompatible(Version.V_5_0_0, Version.V_6_0_0_alpha1_UNRELEASED));
-assertFalse(isCompatible(Version.V_2_0_0, Version.V_6_0_0_alpha1_UNRELEASED));
-assertFalse(isCompatible(Version.V_2_0_0, Version.V_5_0_0));
+assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_0_0_alpha1_UNRELEASED));
+assertFalse(isCompatible(Version.fromId(2000099), Version.V_5_0_0));
 }
 public boolean isCompatible(Version left, Version right) {
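Worth noting for the test changes above: Version.fromId(2000099) keeps working even though V_2_0_0 is gone, because unknown ids fall through to the default branch shown in the Version.java hunk earlier in this commit and are built on the fly, and Version equality is based on the id. A minimal sketch of that fallback, using stand-in types rather than the real Version class:

    // Minimal stand-in illustrating the fromId() fallback relied on by the tests above.
    public final class FromIdFallbackExample {
        static final class Ver {
            final int id;
            Ver(int id) { this.id = id; }
            @Override public boolean equals(Object o) { return o instanceof Ver && ((Ver) o).id == id; }
            @Override public int hashCode() { return id; }
        }

        static Ver fromId(int id) {
            // known 5.x/6.x constants would be matched first; anything else, such as the
            // removed 2.x ids, is constructed on the fly, mirroring the default branch above.
            return new Ver(id);
        }

        public static void main(String[] args) {
            System.out.println(fromId(2000099).equals(new Ver(2000099))); // true
        }
    }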

View File

@@ -72,7 +72,7 @@ public class MainResponseTests extends ESTestCase {
 public void testToXContent() throws IOException {
 Build build = new Build("buildHash", "2016-11-15".toString(), true);
-Version version = Version.V_2_4_5;
+Version version = Version.CURRENT;
 MainResponse response = new MainResponse("nodeName", version, new ClusterName("clusterName"), "clusterUuid", build, true);
 XContentBuilder builder = XContentFactory.jsonBuilder();
 response.toXContent(builder, ToXContent.EMPTY_PARAMS);
@@ -81,11 +81,11 @@
 + "\"cluster_name\":\"clusterName\","
 + "\"cluster_uuid\":\"clusterUuid\","
 + "\"version\":{"
-+ "\"number\":\"2.4.5\","
++ "\"number\":\"" + version.toString() + "\","
 + "\"build_hash\":\"buildHash\","
 + "\"build_date\":\"2016-11-15\","
 + "\"build_snapshot\":true,"
-+ "\"lucene_version\":\"5.5.2\"},"
++ "\"lucene_version\":\"" + version.luceneVersion.toString() + "\"},"
 + "\"tagline\":\"You Know, for Search\""
 + "}", builder.string());
 }

View File

@@ -1,95 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bwcompat;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import java.util.Arrays;
import java.util.Collection;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
@ESIntegTestCase.SuiteScopeTestCase
public class IpFieldBwCompatIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(InternalSettingsPlugin.class); // uses index.merge.enabled
}
@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("old_index")
.setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_3.id)
.addMapping("type", "ip_field", "type=ip"));
assertAcked(prepareCreate("new_index")
.addMapping("type", "ip_field", "type=ip"));
indexRandom(true,
client().prepareIndex("old_index", "type", "1").setSource("ip_field", "127.0.0.1"),
client().prepareIndex("new_index", "type", "1").setSource("ip_field", "127.0.0.1"),
client().prepareIndex("new_index", "type", "2").setSource("ip_field", "::1"));
}
public void testSort() {
SearchResponse response = client().prepareSearch("old_index", "new_index")
.addSort(SortBuilders.fieldSort("ip_field")).get();
assertNoFailures(response);
assertEquals(3, response.getHits().getTotalHits());
assertEquals("::1", response.getHits().getAt(0).getSortValues()[0]);
assertEquals("127.0.0.1", response.getHits().getAt(1).getSortValues()[0]);
assertEquals("127.0.0.1", response.getHits().getAt(2).getSortValues()[0]);
}
public void testRangeAgg() {
SearchResponse response = client().prepareSearch("old_index", "new_index")
.addAggregation(AggregationBuilders.ipRange("ip_range").field("ip_field")
.addMaskRange("127.0.0.1/16")
.addMaskRange("::1/64")).get();
assertNoFailures(response);
assertEquals(3, response.getHits().getTotalHits());
Range range = response.getAggregations().get("ip_range");
assertEquals(2, range.getBuckets().size());
assertEquals("::1/64", range.getBuckets().get(0).getKeyAsString());
assertEquals(3, range.getBuckets().get(0).getDocCount());
assertEquals("127.0.0.1/16", range.getBuckets().get(1).getKeyAsString());
assertEquals(2, range.getBuckets().get(1).getDocCount());
}
public void testTermsAgg() {
SearchResponse response = client().prepareSearch("old_index", "new_index")
.addAggregation(AggregationBuilders.terms("ip_terms").field("ip_field")).get();
assertNoFailures(response);
assertEquals(3, response.getHits().getTotalHits());
Terms terms = response.getAggregations().get("ip_terms");
assertEquals(2, terms.getBuckets().size());
assertEquals(2, terms.getBucketByKey("127.0.0.1").getDocCount());
assertEquals(1, terms.getBucketByKey("::1").getDocCount());
}
}

View File

@@ -316,13 +316,11 @@
 ElasticsearchAssertions.assertNoFailures(searchRsp);
 assertEquals(numDocs, searchRsp.getHits().getTotalHits());
 GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings(indexName).get();
-Version versionCreated = Version.fromId(Integer.parseInt(getSettingsResponse.getSetting(indexName, "index.version.created")));
-if (versionCreated.onOrAfter(Version.V_2_4_0)) {
-    searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("field.with.dots"));
-    searchRsp = searchReq.get();
-    ElasticsearchAssertions.assertNoFailures(searchRsp);
-    assertEquals(numDocs, searchRsp.getHits().getTotalHits());
-}
+searchReq = client().prepareSearch(indexName)
+    .setQuery(QueryBuilders.existsQuery("field.with.dots"));
+searchRsp = searchReq.get();
+ElasticsearchAssertions.assertNoFailures(searchRsp);
+assertEquals(numDocs, searchRsp.getHits().getTotalHits());
 }
 boolean findPayloadBoostInExplanation(Explanation expl) {

View File

@@ -1,76 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
public class RoutingBackwardCompatibilityTests extends ESTestCase {
public void testBackwardCompatibility() throws Exception {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(RoutingBackwardCompatibilityTests.class
.getResourceAsStream("/org/elasticsearch/cluster/routing/shard_routes.txt"), "UTF-8"))) {
for (String line = reader.readLine(); line != null; line = reader.readLine()) {
if (line.startsWith("#")) { // comment
continue;
}
String[] parts = line.split("\t");
assertEquals(Arrays.toString(parts), 7, parts.length);
final String index = parts[0];
final int numberOfShards = Integer.parseInt(parts[1]);
final String type = parts[2];
final String id = parts[3];
final String routing = "null".equals(parts[4]) ? null : parts[4];
final int pre20ExpectedShardId = Integer.parseInt(parts[5]); // not needed anymore - old hashing is gone
final int currentExpectedShard = Integer.parseInt(parts[6]);
OperationRouting operationRouting = new OperationRouting(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
for (Version version : VersionUtils.allReleasedVersions()) {
if (version.onOrAfter(Version.V_2_0_0) == false) {
// unsupported version, no need to test
continue;
}
final Settings settings = settings(version).build();
IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(settings).numberOfShards(numberOfShards)
.numberOfReplicas(randomInt(3)).build();
MetaData.Builder metaData = MetaData.builder().put(indexMetaData, false);
RoutingTable routingTable = RoutingTable.builder().addAsNew(indexMetaData).build();
ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
.metaData(metaData).routingTable(routingTable).build();
final int shardId = operationRouting.indexShards(clusterState, index, id, routing).shardId().getId();
assertEquals(currentExpectedShard, shardId);
}
}
}
}
}

View File

@@ -75,7 +75,7 @@ public class IndexFolderUpgraderTests extends ESTestCase {
 Settings settings = Settings.builder()
     .put(nodeSettings)
     .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
+    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
     .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString())
     .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
     .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -104,7 +104,7 @@
 Settings settings = Settings.builder()
     .put(nodeSettings)
     .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
+    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
     .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString())
     .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
     .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -143,7 +143,7 @@
 Settings settings = Settings.builder()
     .put(nodeSettings)
     .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
+    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
     .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
     .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
     .build();
@@ -168,7 +168,7 @@
 Settings settings = Settings.builder()
     .put(nodeSettings)
     .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
-    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
+    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
     .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5))
     .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
     .build();

View File

@@ -38,10 +38,7 @@ import org.elasticsearch.test.VersionUtils;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
@@ -105,19 +102,6 @@
 assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported"));
 }
public void testBackCompatOverrideDefaultIndexAnalyzer() {
Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
IndexAnalyzers indexAnalyzers = registry.build(IndexSettingsModule.newIndexSettings("index", settings),
singletonMap("default_index", analyzerProvider("default_index")), emptyMap(), emptyMap(), emptyMap(), emptyMap());
assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class));
assertWarnings("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] " +
"instead for index [index]");
}
public void testOverrideDefaultSearchAnalyzer() {
Version version = VersionUtils.randomVersion(random());
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
@@ -128,22 +112,6 @@
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
}
public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
Map<String, AnalyzerProvider<?>> analyzers = new HashMap<>();
analyzers.put("default_index", analyzerProvider("default_index"));
analyzers.put("default_search", analyzerProvider("default_search"));
IndexAnalyzers indexAnalyzers = registry.build(IndexSettingsModule.newIndexSettings("index", settings),
analyzers, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
assertThat(indexAnalyzers.getDefaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertThat(indexAnalyzers.getDefaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class));
assertWarnings("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] " +
"instead for index [index]");
}
public void testConfigureCamelCaseTokenFilter() throws IOException {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
Settings indexSettings = Settings.builder()

View File

@@ -56,18 +56,18 @@ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase {
 public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() {
 assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT),
-    is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_2_0_0)));
+    is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_5_0_0)));
 }
 public void testThatInstancesAreCachedAndReused() {
 assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT),
     PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT));
 // same lucene version should be cached
-assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0),
-    PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_1));
-assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_0_0),
-    PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_2_2_0));
+assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_2_UNRELEASED),
+    PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_3_UNRELEASED));
+assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_0),
+    PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_1));
 }
 public void testThatAnalyzersAreUsedInMapping() throws IOException {
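The assertions above encode the caching contract exercised by this test: prebuilt analyzer instances are shared between Elasticsearch versions that map onto the same Lucene version, and distinct otherwise. A toy illustration of that contract (not how PreBuiltAnalyzers is actually implemented):

    import java.util.HashMap;
    import java.util.Map;

    // Toy cache keyed by Lucene version string, mirroring the assertSame / assertNotSame pairs above.
    public final class AnalyzerCacheExample {
        private static final Map<String, Object> CACHE = new HashMap<>();

        static Object getAnalyzer(String luceneVersion) {
            return CACHE.computeIfAbsent(luceneVersion, v -> new Object());
        }

        public static void main(String[] args) {
            System.out.println(getAnalyzer("6.4.1") == getAnalyzer("6.4.1")); // same Lucene version: cached
            System.out.println(getAnalyzer("6.4.1") == getAnalyzer("6.4.2")); // different: new instance
        }
    }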

View File

@@ -31,16 +31,14 @@ import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.RAMDirectory;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.mapper.BinaryFieldMapper;
 import org.elasticsearch.index.mapper.ContentPath;
-import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.GeoPointFieldMapper;
+import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.mapper.MapperService;
@@ -53,7 +51,6 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
-import org.elasticsearch.test.VersionUtils;
 import org.junit.After;
 import org.junit.Before;
@@ -61,7 +58,6 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
-import static org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.sameInstance;
@@ -135,9 +131,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
 @Before
 public void setup() throws Exception {
-    Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_0); // we need 2.x so that fielddata is allowed on string fields
-    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-    indexService = createIndex("test", settings);
+    indexService = createIndex("test");
 mapperService = indexService.mapperService();
 indicesFieldDataCache = getInstanceFromNode(IndicesService.class).getIndicesFieldDataCache();
 ifdService = indexService.fieldData();

View File

@@ -18,23 +18,18 @@
 */
 package org.elasticsearch.index.mapper;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.InternalSettingsPlugin;
-import org.elasticsearch.test.VersionUtils;
 import org.junit.Before;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Set;
-import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
 import static org.hamcrest.Matchers.containsString;
 public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTestCase {
@@ -117,20 +112,6 @@ public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTes
 );
 assertThat(e.getMessage(), containsString("name cannot be empty string"));
 }
// before 5.x
Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
indexService = createIndex("test_old", oldIndexSettings);
parser = indexService.mapperService().documentMapperParser();
for (String type : TYPES) {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("").field("type", type).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, defaultMapper.mappingSource().string());
}
}
}

View File

@@ -54,7 +54,8 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase {
 }
 public void testExternalValues() throws Exception {
-    Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+    Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
+        Version.CURRENT);
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 IndexService indexService = createIndex("test", settings);
 MapperRegistry mapperRegistry = new MapperRegistry(
@@ -102,7 +103,8 @@
 }
 public void testExternalValuesWithMultifield() throws Exception {
-    Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+    Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
+        Version.CURRENT);
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 IndexService indexService = createIndex("test", settings);
 Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();
@@ -167,7 +169,8 @@
 }
 public void testExternalValuesWithMultifieldTwoLevels() throws Exception {
-    Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+    Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
+        Version.CURRENT);
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 IndexService indexService = createIndex("test", settings);
 Map<String, Mapper.TypeParser> mapperParsers = new HashMap<>();

View File

@@ -148,32 +148,16 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
 GeoBoundingBoxQueryBuilder builder = createTestQueryBuilder();
 tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.COERCE), false);
-except = builder.checkLatLon(true);
-assertNull("Inner post 2.0 validation w/ coerce should ignore invalid "
-    + tester.getClass().getName()
-    + " coordinate: "
-    + tester.invalidCoordinate + " ",
-    except);
-tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.COERCE), false);
-except = builder.checkLatLon(false);
-assertNull("Inner pre 2.0 validation w/ coerce should ignore invalid coordinate: "
+except = builder.checkLatLon();
+assertNull("validation w/ coerce should ignore invalid "
     + tester.getClass().getName()
     + " coordinate: "
     + tester.invalidCoordinate + " ",
     except);
 tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.STRICT), false);
-except = builder.checkLatLon(true);
-assertNull("Inner pre 2.0 validation w/o coerce should ignore invalid coordinate for old indexes: "
-    + tester.getClass().getName()
-    + " coordinate: "
-    + tester.invalidCoordinate,
-    except);
-tester.invalidateCoordinate(builder.setValidationMethod(GeoValidationMethod.STRICT), false);
-except = builder.checkLatLon(false);
-assertNotNull("Inner post 2.0 validation w/o coerce should detect invalid coordinate: "
+except = builder.checkLatLon();
+assertNotNull("validation w/o coerce should detect invalid coordinate: "
     + tester.getClass().getName()
     + " coordinate: "
     + tester.invalidCoordinate,

View File

@@ -20,8 +20,6 @@ package org.elasticsearch.index.similarity;
 import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
-import org.elasticsearch.Version;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.test.ESTestCase;
@@ -51,20 +49,9 @@ public class SimilarityServiceTests extends ESTestCase {
 }
 }
// Pre v3 indices could override built-in similarities
public void testOverrideBuiltInSimilarityPreV3() {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0)
.put("index.similarity.BM25.type", "classic")
.build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap());
assertTrue(service.getSimilarity("BM25") instanceof ClassicSimilarityProvider);
}
// Tests #16594
 public void testOverrideDefaultSimilarity() {
-    Settings settings = Settings.builder().put("index.similarity.default.type", "classic").build();
+    Settings settings = Settings.builder().put("index.similarity.default.type", "classic")
+        .build();
 IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
 SimilarityService service = new SimilarityService(indexSettings, Collections.emptyMap());
 assertTrue(service.getDefaultSimilarity() instanceof ClassicSimilarity);

View File

@ -19,12 +19,12 @@
package org.elasticsearch.index.similarity; package org.elasticsearch.index.similarity;
import org.apache.lucene.search.similarities.BooleanSimilarity;
import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.DFISimilarity;
import org.apache.lucene.search.similarities.AfterEffectL; import org.apache.lucene.search.similarities.AfterEffectL;
import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.BasicModelG; import org.apache.lucene.search.similarities.BasicModelG;
import org.apache.lucene.search.similarities.BooleanSimilarity;
import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.DFISimilarity;
import org.apache.lucene.search.similarities.DFRSimilarity; import org.apache.lucene.search.similarities.DFRSimilarity;
import org.apache.lucene.search.similarities.DistributionSPL; import org.apache.lucene.search.similarities.DistributionSPL;
import org.apache.lucene.search.similarities.IBSimilarity; import org.apache.lucene.search.similarities.IBSimilarity;
@ -34,19 +34,15 @@ import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity;
import org.apache.lucene.search.similarities.LambdaTTF; import org.apache.lucene.search.similarities.LambdaTTF;
import org.apache.lucene.search.similarities.NormalizationH2; import org.apache.lucene.search.similarities.NormalizationH2;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;
@ -248,32 +244,4 @@ public class SimilarityTests extends ESSingleNodeTestCase {
assertThat(e.getMessage(), equalTo("Unknown Similarity type [unknown_similarity] for field [field1]")); assertThat(e.getMessage(), equalTo("Unknown Similarity type [unknown_similarity] for field [field1]"));
} }
} }
public void testSimilarityDefaultBackCompat() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field1")
.field("similarity", "default")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject().string();
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_2_0))
.build();
DocumentMapperParser parser = createIndex("test_v2.x", settings).mapperService().documentMapperParser();
DocumentMapper documentMapper = parser.parse("type", new CompressedXContent(mapping));
assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(ClassicSimilarityProvider.class));
assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity().name(), equalTo("classic"));
parser = createIndex("test_v3.x").mapperService().documentMapperParser();
try {
parser.parse("type", new CompressedXContent(mapping));
fail("Expected MappingParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), equalTo("Unknown Similarity type [default] for field [field1]"));
}
}
} }

View File

@ -23,10 +23,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.de.GermanAnalyzer;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
import org.apache.lucene.analysis.hunspell.Dictionary; import org.apache.lucene.analysis.hunspell.Dictionary;
import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter; import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter;
@ -123,83 +120,6 @@ public class AnalysisModuleTests extends ModuleTestCase {
assertTokenFilter("arabic_normalization", ArabicNormalizationFilter.class); assertTokenFilter("arabic_normalization", ArabicNormalizationFilter.class);
} }
public void testAnalyzerAlias() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.foobar.alias","default")
.put("index.analysis.analyzer.foobar.type", "keyword")
.put("index.analysis.analyzer.foobar_search.alias","default_search")
.put("index.analysis.analyzer.foobar_search.type","english")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
// analyzer aliases are only allowed in 2.x indices
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings);
assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(EnglishAnalyzer.class)));
assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " +
"5.x; analyzer aliases can no longer be created on new indices.",
"setting [index.analysis.analyzer.foobar_search.alias] is only allowed on index [test] because it was created before " +
"5.x; analyzer aliases can no longer be created on new indices.");
}
public void testAnalyzerAliasReferencesAlias() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.foobar.alias","default")
.put("index.analysis.analyzer.foobar.type", "german")
.put("index.analysis.analyzer.foobar_search.alias","default_search")
.put("index.analysis.analyzer.foobar_search.type", "default")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
// analyzer aliases are only allowed in 2.x indices
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings);
assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(GermanAnalyzer.class)));
// analyzer types are bound early before we resolve aliases
assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(StandardAnalyzer.class)));
assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " +
"5.x; analyzer aliases can no longer be created on new indices.",
"setting [index.analysis.analyzer.foobar_search.alias] is only allowed on index [test] because it was created before " +
"5.x; analyzer aliases can no longer be created on new indices.");
}
public void testAnalyzerAliasDefault() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.foobar.alias","default")
.put("index.analysis.analyzer.foobar.type", "keyword")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
// analyzer aliases are only allowed in 2.x indices
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings);
assertThat(indexAnalyzers.get("default").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
assertThat(indexAnalyzers.get("default_search").analyzer(), is(instanceOf(KeywordAnalyzer.class)));
assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " +
"5.x; analyzer aliases can no longer be created on new indices.");
}
public void testAnalyzerAliasMoreThanOnce() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.foobar.alias","default")
.put("index.analysis.analyzer.foobar.type", "keyword")
.put("index.analysis.analyzer.foobar1.alias","default")
.put("index.analysis.analyzer.foobar1.type", "english")
// analyzer aliases are only allowed in 2.x indices
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
AnalysisRegistry newRegistry = getNewRegistry(settings);
IllegalStateException ise = expectThrows(IllegalStateException.class, () -> getIndexAnalyzers(newRegistry, settings));
assertEquals("alias [default] is already used by [foobar]", ise.getMessage());
assertWarnings("setting [index.analysis.analyzer.foobar.alias] is only allowed on index [test] because it was created before " +
"5.x; analyzer aliases can no longer be created on new indices.",
"setting [index.analysis.analyzer.foobar1.alias] is only allowed on index [test] because it was created before " +
"5.x; analyzer aliases can no longer be created on new indices.");
}
public void testAnalyzerAliasNotAllowedPost5x() throws IOException { public void testAnalyzerAliasNotAllowedPost5x() throws IOException {
Settings settings = Settings.builder() Settings settings = Settings.builder()
.put("index.analysis.analyzer.foobar.type", "standard") .put("index.analysis.analyzer.foobar.type", "standard")
@ -218,7 +138,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
Settings settings2 = Settings.builder() Settings settings2 = Settings.builder()
.loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0)
.build(); .build();
AnalysisRegistry newRegistry = getNewRegistry(settings2); AnalysisRegistry newRegistry = getNewRegistry(settings2);
IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings2); IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings2);
@ -231,8 +151,10 @@ public class AnalysisModuleTests extends ModuleTestCase {
// analysis service has the expected version // analysis service has the expected version
assertThat(indexAnalyzers.get("standard").analyzer(), is(instanceOf(StandardAnalyzer.class))); assertThat(indexAnalyzers.get("standard").analyzer(), is(instanceOf(StandardAnalyzer.class)));
assertEquals(Version.V_2_0_0.luceneVersion, indexAnalyzers.get("standard").analyzer().getVersion()); assertEquals(Version.V_5_0_0.luceneVersion,
assertEquals(Version.V_2_0_0.luceneVersion, indexAnalyzers.get("thai").analyzer().getVersion()); indexAnalyzers.get("standard").analyzer().getVersion());
assertEquals(Version.V_5_0_0.luceneVersion,
indexAnalyzers.get("thai").analyzer().getVersion());
assertThat(indexAnalyzers.get("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class))); assertThat(indexAnalyzers.get("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class)));
assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), indexAnalyzers.get("custom7").analyzer().getVersion()); assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), indexAnalyzers.get("custom7").analyzer().getVersion());
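The assertions above read Version.V_5_0_0.luceneVersion, the Lucene release bundled with that Elasticsearch version, and expect the registered analyzers to report it. A minimal, hedged sketch of that relationship follows; the class name and printed comments are illustrative and not part of the change:

import org.elasticsearch.Version;

public class LuceneVersionExample {
    public static void main(String[] args) {
        // Each Elasticsearch Version exposes the Lucene version it ships with.
        org.apache.lucene.util.Version lucene = Version.V_5_0_0.luceneVersion;
        System.out.println(lucene); // the Lucene release bundled with Elasticsearch 5.0.0
        // Later Elasticsearch releases bundle a Lucene version at least as new.
        System.out.println(Version.CURRENT.luceneVersion.onOrAfter(lucene)); // true
    }
}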
@ -355,40 +277,6 @@ public class AnalysisModuleTests extends ModuleTestCase {
} }
} }
public void testUnderscoreInAnalyzerNameAlias() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.valid_name.tokenizer", "keyword")
.put("index.analysis.analyzer.valid_name.alias", "_invalid_name")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
// analyzer aliases are only allowed for 2.x indices
.put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_5))
.build();
try {
getIndexAnalyzers(settings);
fail("This should fail with IllegalArgumentException because the analyzers alias starts with _");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\""));
}
assertWarnings("setting [index.analysis.analyzer.valid_name.alias] is only allowed on index [test] because it was " +
"created before 5.x; analyzer aliases can no longer be created on new indices.");
}
public void testDeprecatedPositionOffsetGap() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer.custom.tokenizer", "standard")
.put("index.analysis.analyzer.custom.position_offset_gap", "128")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
try {
getIndexAnalyzers(settings);
fail("Analyzer should fail if it has position_offset_gap");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("Option [position_offset_gap] in Custom Analyzer [custom] " +
"has been renamed, please use [position_increment_gap] instead."));
}
}
public void testRegisterHunspellDictionary() throws Exception { public void testRegisterHunspellDictionary() throws Exception {
Settings settings = Settings.builder() Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())

View File

@ -182,12 +182,12 @@ public class PluginInfoTests extends ESTestCase {
"description", "fake desc", "description", "fake desc",
"name", "my_plugin", "name", "my_plugin",
"version", "1.0", "version", "1.0",
"elasticsearch.version", Version.V_2_0_0.toString()); "elasticsearch.version", Version.V_5_0_0.toString());
try { try {
PluginInfo.readFromProperties(pluginDir); PluginInfo.readFromProperties(pluginDir);
fail("expected old elasticsearch version exception"); fail("expected old elasticsearch version exception");
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
assertTrue(e.getMessage().contains("was designed for version [2.0.0]")); assertTrue(e.getMessage().contains("was designed for version [5.0.0]"));
} }
} }

View File

@ -65,7 +65,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created
} }
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); private Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception { private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception {
XContentBuilder source = jsonBuilder().startObject().field("city", name); XContentBuilder source = jsonBuilder().startObject().field("city", name);
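This test, like the geo integration tests that follow, now draws the index creation version from the 5.x range rather than 2.x. A minimal sketch of that shared pattern, assuming the ESTestCase context these tests run in; the class and method names here are illustrative:

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

public class RandomCreatedVersionExampleTests extends ESTestCase {
    public void testCreationVersionIsAtLeast5x() {
        // Pick a random version in [5.0.0, CURRENT] and pin it as the index creation version.
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
        Settings settings = Settings.builder()
                .put(IndexMetaData.SETTING_VERSION_CREATED, version)
                .build();
        assertTrue(version.onOrAfter(Version.V_5_0_0));
        assertNotNull(settings.get(IndexMetaData.SETTING_VERSION_CREATED));
    }
}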

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.ObjectIntHashMap; import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntMap; import com.carrotsearch.hppc.ObjectIntMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor; import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
@ -64,7 +65,8 @@ public class GeoHashGridIT extends ESIntegTestCase {
return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created
} }
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); private Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
static ObjectIntMap<String> expectedDocCountsForGeoHash = null; static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
static ObjectIntMap<String> multiValuedExpectedDocCountsForGeoHash = null; static ObjectIntMap<String> multiValuedExpectedDocCountsForGeoHash = null;

View File

@ -51,7 +51,8 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
} }
public void testSimpleBoundingBoxTest() throws Exception { public void testSimpleBoundingBoxTest() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point"); .startObject("properties").startObject("location").field("type", "geo_point");
@ -122,7 +123,8 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
} }
public void testLimit2BoundingBox() throws Exception { public void testLimit2BoundingBox() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point"); .startObject("properties").startObject("location").field("type", "geo_point");
@ -174,7 +176,8 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
} }
public void testCompleteLonRange() throws Exception { public void testCompleteLonRange() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point"); .startObject("properties").startObject("location").field("type", "geo_point");

View File

@ -101,7 +101,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
@Before @Before
public void setupTestIndex() throws IOException { public void setupTestIndex() throws IOException {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point"); .startObject("properties").startObject("location").field("type", "geo_point");

View File

@ -367,7 +367,8 @@ public class GeoFilterIT extends ESIntegTestCase {
public void testBulk() throws Exception { public void testBulk() throws Exception {
byte[] bulkAction = unZipData("/org/elasticsearch/search/geo/gzippedmap.gz"); byte[] bulkAction = unZipData("/org/elasticsearch/search/geo/gzippedmap.gz");
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
.startObject() .startObject()

View File

@ -55,7 +55,8 @@ public class GeoPolygonIT extends ESIntegTestCase {
@Override @Override
protected void setupSuiteScopeCluster() throws Exception { protected void setupSuiteScopeCluster() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point"); .startObject("properties").startObject("location").field("type", "geo_point");

View File

@ -58,7 +58,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
} }
public void testDistanceSortingMVFields() throws Exception { public void testDistanceSortingMVFields() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("locations").field("type", "geo_point"); .startObject("locations").field("type", "geo_point");
@ -187,7 +188,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
// Regression bug: // Regression bug:
// https://github.com/elastic/elasticsearch/issues/2851 // https://github.com/elastic/elasticsearch/issues/2851
public void testDistanceSortingWithMissingGeoPoint() throws Exception { public void testDistanceSortingWithMissingGeoPoint() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("locations").field("type", "geo_point"); .startObject("locations").field("type", "geo_point");
@ -231,7 +233,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
} }
public void testDistanceSortingNestedFields() throws Exception { public void testDistanceSortingNestedFields() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("company").startObject("properties") XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("company").startObject("properties")
.startObject("name").field("type", "text").endObject().startObject("branches").field("type", "nested") .startObject("name").field("type", "text").endObject().startObject("branches").field("type", "nested")
@ -379,7 +382,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
* Issue 3073 * Issue 3073
*/ */
public void testGeoDistanceFilter() throws IOException { public void testGeoDistanceFilter() throws IOException {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
double lat = 40.720611; double lat = 40.720611;
double lon = -73.998776; double lon = -73.998776;

View File

@ -69,7 +69,8 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
* |___________________________ * |___________________________
* 1 2 3 4 5 6 7 * 1 2 3 4 5 6 7
*/ */
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder(); XContentBuilder d1Builder = jsonBuilder();
@ -134,7 +135,8 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
* d1 = (0, 1), (0, 4), (0, 10); so avg. distance is 5, median distance is 4 * d1 = (0, 1), (0, 4), (0, 10); so avg. distance is 5, median distance is 4
* d2 = (0, 1), (0, 5), (0, 6); so avg. distance is 4, median distance is 5 * d2 = (0, 1), (0, 5), (0, 6); so avg. distance is 4, median distance is 5
*/ */
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder(); XContentBuilder d1Builder = jsonBuilder();
@ -194,7 +196,8 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
* |______________________ * |______________________
* 1 2 3 4 5 6 * 1 2 3 4 5 6
*/ */
Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Version version = randomBoolean() ? Version.CURRENT
: VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder(); XContentBuilder d1Builder = jsonBuilder();

View File

@ -35,8 +35,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -73,7 +71,6 @@ import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.junit.Before; import org.junit.Before;
import java.io.IOException; import java.io.IOException;
@ -85,7 +82,6 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery;
@ -485,7 +481,6 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testEmptyName() throws Exception { public void testEmptyName() throws Exception {
// after 5.x
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("").field("type", "percolator").endObject().endObject() .startObject("properties").startObject("").field("type", "percolator").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject().string();
@ -495,14 +490,6 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
() -> parser.parse("type1", new CompressedXContent(mapping)) () -> parser.parse("type1", new CompressedXContent(mapping))
); );
assertThat(e.getMessage(), containsString("name cannot be empty string")); assertThat(e.getMessage(), containsString("name cannot be empty string"));
// before 5.x
Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
DocumentMapperParser parser2x = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser();
DocumentMapper defaultMapper = parser2x.parse("type1", new CompressedXContent(mapping));
assertEquals(mapping, defaultMapper.mappingSource().string());
} }
public void testImplicitlySetDefaultScriptLang() throws Exception { public void testImplicitlySetDefaultScriptLang() throws Exception {

View File

@ -69,7 +69,7 @@ final class RemoteRequestBuilders {
if (searchRequest.source().sorts() != null) { if (searchRequest.source().sorts() != null) {
boolean useScan = false; boolean useScan = false;
// Detect if we should use search_type=scan rather than a sort // Detect if we should use search_type=scan rather than a sort
if (remoteVersion.before(Version.V_2_1_0)) { if (remoteVersion.before(Version.fromId(2010099))) {
for (SortBuilder<?> sort : searchRequest.source().sorts()) { for (SortBuilder<?> sort : searchRequest.source().sorts()) {
if (sort instanceof FieldSortBuilder) { if (sort instanceof FieldSortBuilder) {
FieldSortBuilder f = (FieldSortBuilder) sort; FieldSortBuilder f = (FieldSortBuilder) sort;
@ -90,7 +90,7 @@ final class RemoteRequestBuilders {
params.put("sort", sorts.toString()); params.put("sort", sorts.toString());
} }
} }
if (remoteVersion.before(Version.V_2_0_0)) { if (remoteVersion.before(Version.fromId(2000099))) {
// Versions before 2.0.0 need prompting to return interesting fields. Note that timestamp isn't available at all.... // Versions before 2.0.0 need prompting to return interesting fields. Note that timestamp isn't available at all....
searchRequest.source().storedField("_parent").storedField("_routing").storedField("_ttl"); searchRequest.source().storedField("_parent").storedField("_routing").storedField("_ttl");
} }
@ -172,7 +172,7 @@ final class RemoteRequestBuilders {
} }
static HttpEntity scrollEntity(String scroll, Version remoteVersion) { static HttpEntity scrollEntity(String scroll, Version remoteVersion) {
if (remoteVersion.before(Version.V_2_0_0)) { if (remoteVersion.before(Version.fromId(2000099))) {
// Versions before 2.0.0 extract the plain scroll_id from the body // Versions before 2.0.0 extract the plain scroll_id from the body
return new StringEntity(scroll, ContentType.TEXT_PLAIN); return new StringEntity(scroll, ContentType.TEXT_PLAIN);
} }
@ -186,7 +186,7 @@ final class RemoteRequestBuilders {
} }
static HttpEntity clearScrollEntity(String scroll, Version remoteVersion) { static HttpEntity clearScrollEntity(String scroll, Version remoteVersion) {
if (remoteVersion.before(Version.V_2_0_0)) { if (remoteVersion.before(Version.fromId(2000099))) {
// Versions before 2.0.0 extract the plain scroll_id from the body // Versions before 2.0.0 extract the plain scroll_id from the body
return new StringEntity(scroll, ContentType.TEXT_PLAIN); return new StringEntity(scroll, ContentType.TEXT_PLAIN);
} }

View File

@ -128,7 +128,8 @@ public class RemoteScrollableHitSource extends ScrollableHitSource {
private void logFailure(Exception e) { private void logFailure(Exception e) {
if (e instanceof ResponseException) { if (e instanceof ResponseException) {
ResponseException re = (ResponseException) e; ResponseException re = (ResponseException) e;
if (remoteVersion.before(Version.V_2_0_0) && re.getResponse().getStatusLine().getStatusCode() == 404) { if (remoteVersion.before(Version.fromId(2000099))
&& re.getResponse().getStatusLine().getStatusCode() == 404) {
logger.debug((Supplier<?>) () -> new ParameterizedMessage( logger.debug((Supplier<?>) () -> new ParameterizedMessage(
"Failed to clear scroll [{}] from pre-2.0 Elasticsearch. This is normal if the request terminated " "Failed to clear scroll [{}] from pre-2.0 Elasticsearch. This is normal if the request terminated "
+ "normally as the scroll has already been cleared automatically.", scrollId), e); + "normally as the scroll has already been cleared automatically.", scrollId), e);

View File

@ -96,12 +96,12 @@ public class RemoteRequestBuildersTests extends ESTestCase {
SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
// Test sort:_doc for versions that support it. // Test sort:_doc for versions that support it.
Version remoteVersion = Version.fromId(between(Version.V_2_1_0_ID, Version.CURRENT.id)); Version remoteVersion = Version.fromId(between(2010099, Version.CURRENT.id));
searchRequest.source().sort("_doc"); searchRequest.source().sort("_doc");
assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("sort", "_doc:asc")); assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("sort", "_doc:asc"));
// Test search_type scan for versions that don't support sort:_doc. // Test search_type scan for versions that don't support sort:_doc.
remoteVersion = Version.fromId(between(0, Version.V_2_1_0_ID - 1)); remoteVersion = Version.fromId(between(0, 2010099 - 1));
assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("search_type", "scan")); assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("search_type", "scan"));
// Test sorting by some field. Version doesn't matter. // Test sorting by some field. Version doesn't matter.

View File

@ -144,7 +144,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase {
assertTrue(called.get()); assertTrue(called.get());
called.set(false); called.set(false);
sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/2_3_3.json").lookupRemoteVersion(v -> { sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/2_3_3.json").lookupRemoteVersion(v -> {
assertEquals(Version.V_2_3_3, v); assertEquals(Version.fromId(2030399), v);
called.set(true); called.set(true);
}); });
assertTrue(called.get()); assertTrue(called.get());

View File

@ -22,10 +22,7 @@ package org.elasticsearch.index.mapper.murmur3;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
@ -39,7 +36,6 @@ import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.junit.Before; import org.junit.Before;
import java.util.Arrays; import java.util.Arrays;
@ -47,7 +43,6 @@ import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.function.Supplier; import java.util.function.Supplier;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
@ -157,20 +152,5 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
() -> parser.parse("type", new CompressedXContent(mapping)) () -> parser.parse("type", new CompressedXContent(mapping))
); );
assertThat(e.getMessage(), containsString("name cannot be empty string")); assertThat(e.getMessage(), containsString("name cannot be empty string"));
// before 5.x
Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5);
Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build();
IndexService indexService2x = createIndex("test_old", oldIndexSettings);
Supplier<QueryShardContext> queryShardContext = () -> {
return indexService2x.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); });
};
DocumentMapperParser parser = new DocumentMapperParser(indexService2x.getIndexSettings(), indexService2x.mapperService(),
indexService2x.getIndexAnalyzers(), indexService2x.xContentRegistry(), indexService2x.similarityService(), mapperRegistry,
queryShardContext);
DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, defaultMapper.mappingSource().string());
} }
} }

View File

@ -123,7 +123,7 @@ public class ClientYamlTestSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
parser = createParser(YamlXContent.yamlXContent, parser = createParser(YamlXContent.yamlXContent,
"\"First test section\": \n" + "\"First test section\": \n" +
" - skip:\n" + " - skip:\n" +
" version: \"2.0.0 - 2.2.0\"\n" + " version: \"5.0.0 - 5.2.0\"\n" +
" reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" + " reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" +
" - do :\n" + " - do :\n" +
" catch: missing\n" + " catch: missing\n" +
@ -138,8 +138,9 @@ public class ClientYamlTestSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
assertThat(testSection, notNullValue()); assertThat(testSection, notNullValue());
assertThat(testSection.getName(), equalTo("First test section")); assertThat(testSection.getName(), equalTo("First test section"));
assertThat(testSection.getSkipSection(), notNullValue()); assertThat(testSection.getSkipSection(), notNullValue());
assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_5_0_0));
assertThat(testSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_2_0)); assertThat(testSection.getSkipSection().getUpperVersion(),
equalTo(Version.V_5_2_0_UNRELEASED));
assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259"));
assertThat(testSection.getExecutableSections().size(), equalTo(2)); assertThat(testSection.getExecutableSections().size(), equalTo(2));
DoSection doSection = (DoSection)testSection.getExecutableSections().get(0); DoSection doSection = (DoSection)testSection.getExecutableSections().get(0);

View File

@ -66,10 +66,10 @@ public class ClientYamlTestSuiteTests extends AbstractClientYamlTestFragmentParserTestCase {
" - match: {test_index.test_type.properties.text.analyzer: whitespace}\n" + " - match: {test_index.test_type.properties.text.analyzer: whitespace}\n" +
"\n" + "\n" +
"---\n" + "---\n" +
"\"Get type mapping - pre 1.0\":\n" + "\"Get type mapping - pre 5.0\":\n" +
"\n" + "\n" +
" - skip:\n" + " - skip:\n" +
" version: \"2.0.0 - \"\n" + " version: \"5.0.0 - \"\n" +
" reason: \"for newer versions the index name is always returned\"\n" + " reason: \"for newer versions the index name is always returned\"\n" +
"\n" + "\n" +
" - do:\n" + " - do:\n" +
@ -130,11 +130,13 @@ public class ClientYamlTestSuiteTests extends AbstractClientYamlTestFragmentParserTestCase {
assertThat(matchAssertion.getField(), equalTo("test_index.test_type.properties.text.analyzer")); assertThat(matchAssertion.getField(), equalTo("test_index.test_type.properties.text.analyzer"));
assertThat(matchAssertion.getExpectedValue().toString(), equalTo("whitespace")); assertThat(matchAssertion.getExpectedValue().toString(), equalTo("whitespace"));
assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Get type mapping - pre 1.0")); assertThat(restTestSuite.getTestSections().get(1).getName(),
equalTo("Get type mapping - pre 5.0"));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(),
equalTo("for newer versions the index name is always returned")); equalTo("for newer versions the index name is always returned"));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(),
equalTo(Version.V_5_0_0));
assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3));
assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class));

View File

@ -20,7 +20,6 @@ package org.elasticsearch.test.rest.yaml.section;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import org.elasticsearch.test.rest.yaml.section.SetupSection;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
@ -54,7 +53,7 @@ public class SetupSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
public void testParseSetupAndSkipSectionNoSkip() throws Exception { public void testParseSetupAndSkipSectionNoSkip() throws Exception {
parser = createParser(YamlXContent.yamlXContent, parser = createParser(YamlXContent.yamlXContent,
" - skip:\n" + " - skip:\n" +
" version: \"2.0.0 - 2.3.0\"\n" + " version: \"5.0.0 - 5.3.0\"\n" +
" reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" + " reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" +
" - do:\n" + " - do:\n" +
" index1:\n" + " index1:\n" +
@ -75,8 +74,9 @@ public class SetupSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
assertThat(setupSection, notNullValue()); assertThat(setupSection, notNullValue());
assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false)); assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false));
assertThat(setupSection.getSkipSection(), notNullValue()); assertThat(setupSection.getSkipSection(), notNullValue());
assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_5_0_0));
assertThat(setupSection.getSkipSection().getUpperVersion(), equalTo(Version.V_2_3_0)); assertThat(setupSection.getSkipSection().getUpperVersion(),
equalTo(Version.V_5_3_0_UNRELEASED));
assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259"));
assertThat(setupSection.getDoSections().size(), equalTo(2)); assertThat(setupSection.getDoSections().size(), equalTo(2));
assertThat(setupSection.getDoSections().get(0).getApiCallSection().getApi(), equalTo("index1")); assertThat(setupSection.getDoSections().get(0).getApiCallSection().getApi(), equalTo("index1"));

View File

@ -34,16 +34,18 @@ import static org.hamcrest.Matchers.nullValue;
public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCase { public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
public void testSkip() { public void testSkip() {
SkipSection section = new SkipSection("2.0.0 - 2.1.0", SkipSection section = new SkipSection("5.0.0 - 5.1.0",
randomBoolean() ? Collections.emptyList() : Collections.singletonList("warnings"), "foobar"); randomBoolean() ? Collections.emptyList() : Collections.singletonList("warnings"), "foobar");
assertFalse(section.skip(Version.CURRENT)); assertFalse(section.skip(Version.CURRENT));
assertTrue(section.skip(Version.V_2_0_0)); assertTrue(section.skip(Version.V_5_0_0));
section = new SkipSection(randomBoolean() ? null : "2.0.0 - 2.1.0", Collections.singletonList("boom"), "foobar"); section = new SkipSection(randomBoolean() ? null : "5.0.0 - 5.1.0",
Collections.singletonList("boom"), "foobar");
assertTrue(section.skip(Version.CURRENT)); assertTrue(section.skip(Version.CURRENT));
} }
public void testMessage() { public void testMessage() {
SkipSection section = new SkipSection("2.0.0 - 2.1.0", Collections.singletonList("warnings"), "foobar"); SkipSection section = new SkipSection("5.0.0 - 5.1.0",
Collections.singletonList("warnings"), "foobar");
assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR")); assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR"));
section = new SkipSection(null, Collections.singletonList("warnings"), "foobar"); section = new SkipSection(null, Collections.singletonList("warnings"), "foobar");
assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR")); assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR"));
@ -53,14 +55,14 @@ public class SkipSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
public void testParseSkipSectionVersionNoFeature() throws Exception { public void testParseSkipSectionVersionNoFeature() throws Exception {
parser = createParser(YamlXContent.yamlXContent, parser = createParser(YamlXContent.yamlXContent,
"version: \" - 2.1.0\"\n" + "version: \" - 5.1.1\"\n" +
"reason: Delete ignores the parent param" "reason: Delete ignores the parent param"
); );
SkipSection skipSection = SkipSection.parse(parser); SkipSection skipSection = SkipSection.parse(parser);
assertThat(skipSection, notNullValue()); assertThat(skipSection, notNullValue());
assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion())); assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion()));
assertThat(skipSection.getUpperVersion(), equalTo(Version.V_2_1_0)); assertThat(skipSection.getUpperVersion(), equalTo(Version.V_5_1_1_UNRELEASED));
assertThat(skipSection.getFeatures().size(), equalTo(0)); assertThat(skipSection.getFeatures().size(), equalTo(0));
assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param")); assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param"));
} }
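The assertions above rely on SkipSection parsing and evaluating a version range. A self-contained, hedged illustration of that behaviour; the class name is made up for the example, while the constructor and skip() calls mirror the ones in the test:

import java.util.Collections;

import org.elasticsearch.Version;
import org.elasticsearch.test.rest.yaml.section.SkipSection;

public class SkipSectionExample {
    public static void main(String[] args) {
        // A range of "5.0.0 - 5.1.0" skips 5.0.0 but not the current version.
        SkipSection section = new SkipSection("5.0.0 - 5.1.0", Collections.emptyList(), "example reason");
        System.out.println(section.skip(Version.V_5_0_0)); // true
        System.out.println(section.skip(Version.CURRENT)); // false
    }
}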

View File

@ -56,7 +56,7 @@ public class TeardownSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
public void testParseWithSkip() throws Exception { public void testParseWithSkip() throws Exception {
parser = createParser(YamlXContent.yamlXContent, parser = createParser(YamlXContent.yamlXContent,
" - skip:\n" + " - skip:\n" +
" version: \"2.0.0 - 2.3.0\"\n" + " version: \"5.0.0 - 5.3.0\"\n" +
" reason: \"there is a reason\"\n" + " reason: \"there is a reason\"\n" +
" - do:\n" + " - do:\n" +
" delete:\n" + " delete:\n" +
@ -75,8 +75,8 @@ public class TeardownSectionTests extends AbstractClientYamlTestFragmentParserTestCase {
TeardownSection section = TeardownSection.parse(parser); TeardownSection section = TeardownSection.parse(parser);
assertThat(section, notNullValue()); assertThat(section, notNullValue());
assertThat(section.getSkipSection().isEmpty(), equalTo(false)); assertThat(section.getSkipSection().isEmpty(), equalTo(false));
assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.V_5_0_0));
assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.V_2_3_0)); assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.V_5_3_0_UNRELEASED));
assertThat(section.getSkipSection().getReason(), equalTo("there is a reason")); assertThat(section.getSkipSection().getReason(), equalTo("there is a reason"));
assertThat(section.getDoSections().size(), equalTo(2)); assertThat(section.getDoSections().size(), equalTo(2));
assertThat(section.getDoSections().get(0).getApiCallSection().getApi(), equalTo("delete")); assertThat(section.getDoSections().get(0).getApiCallSection().getApi(), equalTo("delete"));

View File

@ -46,21 +46,22 @@ public class VersionUtilsTests extends ESTestCase {
assertTrue(got.onOrBefore(Version.CURRENT)); assertTrue(got.onOrBefore(Version.CURRENT));
// sub range // sub range
got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha1); got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0,
assertTrue(got.onOrAfter(Version.V_2_0_0)); Version.V_6_0_0_alpha1_UNRELEASED);
assertTrue(got.onOrBefore(Version.V_5_0_0_alpha1)); assertTrue(got.onOrAfter(Version.V_5_0_0));
assertTrue(got.onOrBefore(Version.V_6_0_0_alpha1_UNRELEASED));
// unbounded lower // unbounded lower
got = VersionUtils.randomVersionBetween(random(), null, Version.V_5_0_0_alpha1); got = VersionUtils.randomVersionBetween(random(), null, Version.V_6_0_0_alpha1_UNRELEASED);
assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
assertTrue(got.onOrBefore(Version.V_5_0_0_alpha1)); assertTrue(got.onOrBefore(Version.V_6_0_0_alpha1_UNRELEASED));
got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allReleasedVersions().get(0)); got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allReleasedVersions().get(0));
assertTrue(got.onOrAfter(VersionUtils.getFirstVersion())); assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
assertTrue(got.onOrBefore(VersionUtils.allReleasedVersions().get(0))); assertTrue(got.onOrBefore(VersionUtils.allReleasedVersions().get(0)));
// unbounded upper // unbounded upper
got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, null); got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, null);
assertTrue(got.onOrAfter(Version.V_2_0_0)); assertTrue(got.onOrAfter(Version.V_5_0_0));
assertTrue(got.onOrBefore(Version.CURRENT)); assertTrue(got.onOrBefore(Version.CURRENT));
got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null); got = VersionUtils.randomVersionBetween(random(), VersionUtils.getPreviousVersion(), null);
assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion())); assertTrue(got.onOrAfter(VersionUtils.getPreviousVersion()));
@ -71,8 +72,9 @@ public class VersionUtilsTests extends ESTestCase {
assertEquals(got, VersionUtils.getFirstVersion()); assertEquals(got, VersionUtils.getFirstVersion());
got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT); got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
assertEquals(got, Version.CURRENT); assertEquals(got, Version.CURRENT);
got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.V_5_0_0_alpha1); got = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1_UNRELEASED,
assertEquals(got, Version.V_5_0_0_alpha1); Version.V_6_0_0_alpha1_UNRELEASED);
assertEquals(got, Version.V_6_0_0_alpha1_UNRELEASED);
// implicit range of one // implicit range of one
got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion()); got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion());