Version: Set version to 5.0.0-alpha1
Changing the version required a minor fix in the RPM build. For an alpha/beta version, the RPM release field carries the alpha/beta qualifier, since an RPM version cannot contain dashes or tildes.
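As a rough sketch of the idea (assuming the package version reaches the RPM task in a '~'-separated form such as '5.0.0~alpha1', which is what the buildRpm change further down suggests), the string is split so the RPM version stays clean and the qualifier moves into the release field. The rpmCoordinates helper below is hypothetical, not part of the commit:

// Hypothetical Groovy helper mirroring the buildRpm logic in this commit:
// split a '~'-separated version into separate RPM version and release fields.
def rpmCoordinates(String v) {
    if (v.contains('~')) {
        def parts = v.tokenize('~')               // '5.0.0~alpha1' -> ['5.0.0', 'alpha1']
        return [version: parts[0], release: parts[1]]
    }
    return [version: v, release: '1']             // plain releases keep release '1'
}

assert rpmCoordinates('5.0.0~alpha1') == [version: '5.0.0', release: 'alpha1']
assert rpmCoordinates('5.0.0') == [version: '5.0.0', release: '1']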
commit b2573858b6
parent 7ecfa6e2ad
@@ -1,4 +1,4 @@
-elasticsearch = 5.0.0
+elasticsearch = 5.0.0-alpha1
 lucene = 6.0.0-snapshot-f0aa4fc
 
 # optional dependencies
@@ -64,9 +64,9 @@ public class Version {
 public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
 public static final int V_2_3_0_ID = 2030099;
 public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
-public static final int V_5_0_0_ID = 5000099;
-public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
-public static final Version CURRENT = V_5_0_0;
+public static final int V_5_0_0_alpha1_ID = 5000001;
+public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
+public static final Version CURRENT = V_5_0_0_alpha1;
 
 static {
 assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to ["

@@ -79,8 +79,8 @@ public class Version {
 
 public static Version fromId(int id) {
 switch (id) {
-case V_5_0_0_ID:
-return V_5_0_0;
+case V_5_0_0_alpha1_ID:
+return V_5_0_0_alpha1;
 case V_2_3_0_ID:
 return V_2_3_0;
 case V_2_2_1_ID:
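For context (an illustration, not part of the commit): the numeric id constants above appear to pack major/minor/revision digits plus a two-digit build qualifier, with 99 marking a GA release and lower values marking pre-releases, which is why 5.0.0-alpha1 becomes 5000001 while the old 5.0.0 constant was 5000099. A minimal Groovy sketch of that decomposition, with a hypothetical decompose helper:

// Hypothetical decomposition of the assumed id layout (Groovy).
def decompose(int id) {
    [major: id.intdiv(1000000), minor: id.intdiv(10000) % 100,
     revision: id.intdiv(100) % 100, build: id % 100]
}

assert decompose(5000001) == [major: 5, minor: 0, revision: 0, build: 1]    // 5.0.0-alpha1
assert decompose(5000099) == [major: 5, minor: 0, revision: 0, build: 99]   // former 5.0.0 id
assert decompose(2030099) == [major: 2, minor: 3, revision: 0, build: 99]   // V_2_3_0_ID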
@@ -190,7 +190,7 @@ public class CommonStatsFlags implements Streamable, Cloneable {
 out.writeStringArrayNullable(groups);
 out.writeStringArrayNullable(fieldDataFields);
 out.writeStringArrayNullable(completionDataFields);
-if (out.getVersion().onOrAfter(Version.V_5_0_0)) {
+if (out.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
 out.writeBoolean(includeSegmentFileSizes);
 }
 }

@@ -208,7 +208,7 @@ public class CommonStatsFlags implements Streamable, Cloneable {
 groups = in.readStringArray();
 fieldDataFields = in.readStringArray();
 completionDataFields = in.readStringArray();
-if (in.getVersion().onOrAfter(Version.V_5_0_0)) {
+if (in.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
 includeSegmentFileSizes = in.readBoolean();
 } else {
 includeSegmentFileSizes = false;
@@ -266,7 +266,7 @@ public final class ShardRouting implements Streamable, ToXContent {
 return false;
 }
 
-if (indexMetaData.activeAllocationIds(id()).isEmpty() && indexMetaData.getCreationVersion().onOrAfter(Version.V_5_0_0)) {
+if (indexMetaData.activeAllocationIds(id()).isEmpty() && indexMetaData.getCreationVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
 // when no shards with this id have ever been active for this index
 return false;
 }
@@ -118,7 +118,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
 final boolean enoughAllocationsFound;
 
 if (lastActiveAllocationIds.isEmpty()) {
-assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_5_0_0) : "trying to allocated a primary with an empty allocation id set, but index is new";
+assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_5_0_0_alpha1) : "trying to allocated a primary with an empty allocation id set, but index is new";
 // when we load an old index (after upgrading cluster) or restore a snapshot of an old index
 // fall back to old version-based allocation mode
 // Note that once the shard has been active, lastActiveAllocationIds will be non-empty

@@ -128,7 +128,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
 } else {
 enoughAllocationsFound = isEnoughVersionBasedAllocationsFound(indexMetaData, nodeShardsResult);
 }
-logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}", shard.index(), shard.id(), Version.V_5_0_0, nodeShardsResult.allocationsFound, shard);
+logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}", shard.index(), shard.id(), Version.V_5_0_0_alpha1, nodeShardsResult.allocationsFound, shard);
 } else {
 assert lastActiveAllocationIds.isEmpty() == false;
 // use allocation ids to select nodes
@@ -127,7 +127,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
 }
 if (analyzers.containsKey("default_index")) {
 final Version createdVersion = indexSettings.getIndexVersionCreated();
-if (createdVersion.onOrAfter(Version.V_5_0_0)) {
+if (createdVersion.onOrAfter(Version.V_5_0_0_alpha1)) {
 throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use [index.analysis.analyzer.default] instead for index [" + index().getName() + "]");
 } else {
 deprecationLogger.deprecated("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] instead for index [{}]", index().getName());
@@ -348,7 +348,7 @@ public class SegmentsStats implements Streamable, ToXContent {
 indexWriterMaxMemoryInBytes = in.readLong();
 bitsetMemoryInBytes = in.readLong();
 
-if (in.getVersion().onOrAfter(Version.V_5_0_0)) {
+if (in.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
 int size = in.readVInt();
 ImmutableOpenMap.Builder<String, Long> map = ImmutableOpenMap.builder(size);
 for (int i = 0; i < size; i++) {

@@ -376,7 +376,7 @@ public class SegmentsStats implements Streamable, ToXContent {
 out.writeLong(indexWriterMaxMemoryInBytes);
 out.writeLong(bitsetMemoryInBytes);
 
-if (out.getVersion().onOrAfter(Version.V_5_0_0)) {
+if (out.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
 out.writeVInt(fileSizes.size());
 for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
 ObjectObjectCursor<String, Long> entry = it.next();
@@ -219,7 +219,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
 }
 
 protected boolean defaultDocValues(Version indexCreated) {
-if (indexCreated.onOrAfter(Version.V_5_0_0)) {
+if (indexCreated.onOrAfter(Version.V_5_0_0_alpha1)) {
 // add doc values by default to keyword (boolean, numerics, etc.) fields
 return fieldType.tokenized() == false;
 } else {

@@ -229,7 +229,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
 
 protected void setupFieldType(BuilderContext context) {
 fieldType.setName(buildFullName(context));
-if (context.indexCreatedVersion().before(Version.V_5_0_0)) {
+if (context.indexCreatedVersion().before(Version.V_5_0_0_alpha1)) {
 fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f);
 }
 if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {

@@ -289,7 +289,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
 if (!customBoost()
 // don't set boosts eg. on dv fields
 && field.fieldType().indexOptions() != IndexOptions.NONE
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(fieldType().boost());
 }
 context.doc().add(field);
@@ -341,7 +341,7 @@ public abstract class MappedFieldType extends FieldType {
 public Query termQuery(Object value, @Nullable QueryShardContext context) {
 TermQuery query = new TermQuery(createTerm(value));
 if (boost == 1f ||
-(context != null && context.indexVersionCreated().before(Version.V_5_0_0))) {
+(context != null && context.indexVersionCreated().before(Version.V_5_0_0_alpha1))) {
 return query;
 }
 return new BoostQuery(query, boost);
@@ -261,7 +261,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
 context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
 }
 } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 XContentParser.Token token;
 String currentFieldName = null;
 Byte objValue = fieldType().nullValue();

@@ -294,7 +294,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
 }
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomByteNumericField field = new CustomByteNumericField(value, fieldType());
-if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(boost);
 }
 fields.add(field);
@@ -488,7 +488,7 @@ public class DateFieldMapper extends NumberFieldMapper {
 } else if (token == XContentParser.Token.VALUE_NUMBER) {
 dateAsString = parser.text();
 } else if (token == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 String currentFieldName = null;
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token == XContentParser.Token.FIELD_NAME) {

@@ -523,7 +523,7 @@ public class DateFieldMapper extends NumberFieldMapper {
 if (value != null) {
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
-if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(boost);
 }
 fields.add(field);
@@ -253,7 +253,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
 context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
 }
 } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 XContentParser.Token token;
 String currentFieldName = null;
 Double objValue = fieldType().nullValue();

@@ -287,7 +287,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
 
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomDoubleNumericField field = new CustomDoubleNumericField(value, fieldType());
-if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(boost);
 }
 fields.add(field);
@@ -265,7 +265,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
 context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
 }
 } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 XContentParser.Token token;
 String currentFieldName = null;
 Float objValue = fieldType().nullValue();

@@ -299,7 +299,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
 
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomFloatNumericField field = new CustomFloatNumericField(value, fieldType());
-if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(boost);
 }
 fields.add(field);
@@ -270,7 +270,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
 context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
 }
 } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 XContentParser.Token token;
 String currentFieldName = null;
 Integer objValue = fieldType().nullValue();

@@ -307,7 +307,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
 protected void addIntegerFields(ParseContext context, List<Field> fields, int value, float boost) {
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomIntegerNumericField field = new CustomIntegerNumericField(value, fieldType());
-if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(boost);
 }
 fields.add(field);
@@ -258,7 +258,7 @@ public class LongFieldMapper extends NumberFieldMapper {
 context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
 }
 } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 XContentParser.Token token;
 String currentFieldName = null;
 Long objValue = fieldType().nullValue();

@@ -291,7 +291,7 @@ public class LongFieldMapper extends NumberFieldMapper {
 }
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
-if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(boost);
 }
 fields.add(field);
@@ -266,7 +266,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
 context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
 }
 } else if (parser.currentToken() == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 XContentParser.Token token;
 String currentFieldName = null;
 Short objValue = fieldType().nullValue();

@@ -299,7 +299,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
 }
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomShortNumericField field = new CustomShortNumericField(value, fieldType());
-if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(boost);
 }
 fields.add(field);
@@ -193,7 +193,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 
 @Override
 public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) {
 // Automatically upgrade simple mappings for ease of upgrade, otherwise fail
 if (SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE.containsAll(node.keySet())) {
 deprecationLogger.deprecated("The [string] field is deprecated, please use [text] or [keyword] instead on [{}]",

@@ -490,7 +490,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 int positionIncrementGap, int ignoreAbove,
 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
 super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
-if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) {
+if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha1)) {
 throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
 + "or [keyword] field instead for field [" + fieldType.name() + "]");
 }

@@ -573,7 +573,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType());
-if (valueAndBoost.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (valueAndBoost.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(valueAndBoost.boost());
 }
 fields.add(field);

@@ -600,7 +600,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
 return new ValueAndBoost(nullValue, defaultBoost);
 }
 if (parser.currentToken() == XContentParser.Token.START_OBJECT
-&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 XContentParser.Token token;
 String currentFieldName = null;
 String value = nullValue;
@@ -250,7 +250,7 @@ public class TypeParsers {
 } else if (propName.equals("boost")) {
 builder.boost(nodeFloatValue(propNode));
 iterator.remove();
-} else if (parserContext.indexVersionCreated().before(Version.V_5_0_0)
+} else if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)
 && parseNorms(builder, propName, propNode, parserContext)) {
 iterator.remove();
 } else if (propName.equals("index_options")) {

@@ -434,7 +434,7 @@ public class TypeParsers {
 }
 
 private static SimilarityProvider resolveSimilarity(Mapper.TypeParser.ParserContext parserContext, String name, String value) {
-if (parserContext.indexVersionCreated().before(Version.V_5_0_0) && "default".equals(value)) {
+if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1) && "default".equals(value)) {
 // "default" similarity has been renamed into "classic" in 3.x.
 value = SimilarityService.DEFAULT_SIMILARITY;
 }
@@ -46,7 +46,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
-import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 
 import java.io.IOException;
 import java.util.Iterator;

@@ -454,7 +453,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
 }
 for (Field field : fields) {
 if (!customBoost() &&
-fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(fieldType().boost());
 }
 context.doc().add(field);
@@ -124,7 +124,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
 if (fieldName.equals("enabled")) {
 builder.enabled(lenientNodeBooleanValue(fieldNode));
 iterator.remove();
-} else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_5_0_0)) {
+} else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) {
 // ignore on old indices, reject on and after 5.0
 iterator.remove();
 } else if (fieldName.equals("includes")) {
@@ -322,7 +322,7 @@ public class IpFieldMapper extends NumberFieldMapper {
 final long value = ipToLong(ipAsString);
 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
-if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
 field.setBoost(fieldType().boost());
 }
 fields.add(field);
@@ -100,7 +100,7 @@ public class DynamicTemplate implements ToXContent {
 matchPattern = entry.getValue().toString();
 } else if ("mapping".equals(propName)) {
 mapping = (Map<String, Object>) entry.getValue();
-} else if (indexVersionCreated.onOrAfter(Version.V_5_0_0)) {
+} else if (indexVersionCreated.onOrAfter(Version.V_5_0_0_alpha1)) {
 // unknown parameters were ignored before but still carried through serialization
 // so we need to ignore them at parsing time for old indices
 throw new IllegalArgumentException("Illegal dynamic template parameter: [" + propName + "]");
@@ -29,7 +29,6 @@ import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;

@@ -37,11 +36,9 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.lucene.index.ElasticsearchLeafReader;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

@@ -143,7 +140,7 @@ public final class PercolatorQueryCache extends AbstractIndexComponent
 }
 
 IntObjectHashMap<Query> queries = new IntObjectHashMap<>();
-boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0);
+boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0_alpha1);
 PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.TYPE_NAME), PostingsEnum.NONE);
 if (postings != null) {
 if (legacyLoading) {
@@ -301,7 +301,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
 documentType, registry, document, docSearcher, percolateTypeQuery
 );
 Settings indexSettings = registry.getIndexSettings().getSettings();
-if (indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null).onOrAfter(Version.V_5_0_0)) {
+if (indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null).onOrAfter(Version.V_5_0_0_alpha1)) {
 builder.extractQueryTermsQuery(
 PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME, PercolatorFieldMapper.UNKNOWN_QUERY_FULL_FIELD_NAME
 );
@@ -65,7 +65,7 @@ public final class SimilarityService extends AbstractIndexComponent {
 for (Map.Entry<String, Settings> entry : similaritySettings.entrySet()) {
 String name = entry.getKey();
 // Starting with v5.0 indices, it should no longer be possible to redefine built-in similarities
-if(BUILT_IN.containsKey(name) && indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0)) {
+if(BUILT_IN.containsKey(name) && indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) {
 throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + name + "]");
 }
 Settings settings = entry.getValue();

@@ -83,7 +83,7 @@ public final class SimilarityService extends AbstractIndexComponent {
 }
 for (Map.Entry<String, SimilarityProvider> entry : addSimilarities(similaritySettings, DEFAULTS).entrySet()) {
 // Avoid overwriting custom providers for indices older that v5.0
-if (providers.containsKey(entry.getKey()) && indexSettings.getIndexVersionCreated().before(Version.V_5_0_0)) {
+if (providers.containsKey(entry.getKey()) && indexSettings.getIndexVersionCreated().before(Version.V_5_0_0_alpha1)) {
 continue;
 }
 providers.put(entry.getKey(), entry.getValue());
@@ -32,7 +32,7 @@ import java.util.Locale;
 import java.util.Map;
 
 import static org.elasticsearch.Version.V_2_2_0;
-import static org.elasticsearch.Version.V_5_0_0;
+import static org.elasticsearch.Version.V_5_0_0_alpha1;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.containsString;

@@ -42,21 +42,21 @@ import static org.hamcrest.Matchers.sameInstance;
 public class VersionTests extends ESTestCase {
 
 public void testVersionComparison() throws Exception {
-assertThat(V_2_2_0.before(V_5_0_0), is(true));
+assertThat(V_2_2_0.before(V_5_0_0_alpha1), is(true));
 assertThat(V_2_2_0.before(V_2_2_0), is(false));
-assertThat(V_5_0_0.before(V_2_2_0), is(false));
+assertThat(V_5_0_0_alpha1.before(V_2_2_0), is(false));
 
-assertThat(V_2_2_0.onOrBefore(V_5_0_0), is(true));
+assertThat(V_2_2_0.onOrBefore(V_5_0_0_alpha1), is(true));
 assertThat(V_2_2_0.onOrBefore(V_2_2_0), is(true));
-assertThat(V_5_0_0.onOrBefore(V_2_2_0), is(false));
+assertThat(V_5_0_0_alpha1.onOrBefore(V_2_2_0), is(false));
 
-assertThat(V_2_2_0.after(V_5_0_0), is(false));
+assertThat(V_2_2_0.after(V_5_0_0_alpha1), is(false));
 assertThat(V_2_2_0.after(V_2_2_0), is(false));
-assertThat(V_5_0_0.after(V_2_2_0), is(true));
+assertThat(V_5_0_0_alpha1.after(V_2_2_0), is(true));
 
-assertThat(V_2_2_0.onOrAfter(V_5_0_0), is(false));
+assertThat(V_2_2_0.onOrAfter(V_5_0_0_alpha1), is(false));
 assertThat(V_2_2_0.onOrAfter(V_2_2_0), is(true));
-assertThat(V_5_0_0.onOrAfter(V_2_2_0), is(true));
+assertThat(V_5_0_0_alpha1.onOrAfter(V_2_2_0), is(true));
 
 assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1")));
 assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2")));

@@ -133,7 +133,7 @@ public class VersionTests extends ESTestCase {
 
 public void testIndexCreatedVersion() {
 // an actual index has a IndexMetaData.SETTING_INDEX_UUID
-final Version version = randomFrom(Version.V_2_0_0, Version.V_2_3_0, Version.V_5_0_0);
+final Version version = randomFrom(Version.V_2_0_0, Version.V_2_3_0, Version.V_5_0_0_alpha1);
 assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build()));
 }
 

@@ -142,13 +142,13 @@ public class VersionTests extends ESTestCase {
 assertThat(Version.V_2_1_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
 assertThat(Version.V_2_2_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
 assertThat(Version.V_2_3_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
-assertThat(Version.V_5_0_0.minimumCompatibilityVersion(), equalTo(Version.V_5_0_0));
+assertThat(Version.V_5_0_0_alpha1.minimumCompatibilityVersion(), equalTo(Version.V_5_0_0_alpha1));
 }
 
 public void testToString() {
 // with 2.0.beta we lowercase
 assertEquals("2.0.0-beta1", Version.V_2_0_0_beta1.toString());
-assertEquals("5.0.0", Version.V_5_0_0.toString());
+assertEquals("5.0.0-alpha1", Version.V_5_0_0_alpha1.toString());
 assertEquals("2.3.0", Version.V_2_3_0.toString());
 assertEquals("0.90.0.Beta1", Version.fromString("0.90.0.Beta1").toString());
 assertEquals("1.0.0.Beta1", Version.fromString("1.0.0.Beta1").toString());

@@ -240,6 +240,8 @@ public class VersionTests extends ESTestCase {
 number = number.replace("-beta", "_beta");
 } else if (v.isRC()) {
 number = number.replace("-rc", "_rc");
+} else if (v.isAlpha()) {
+number = number.replace("-alpha", "_alpha");
 }
 assertEquals("V_" + number.replace('.', '_'), constantName);
 } else {
@@ -275,7 +275,7 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
 Settings nodeSettings = Settings.settingsBuilder()
 .put("discovery.type", "zen") // <-- To override the local setting if set externally
 .build();
-String nodeName = internalCluster().startNode(nodeSettings, Version.V_5_0_0);
+String nodeName = internalCluster().startNode(nodeSettings, Version.V_5_0_0_alpha1);
 ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName);
 ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName);
 DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), Version.V_2_0_0);

@@ -292,13 +292,13 @@ public class ZenDiscoveryIT extends ESIntegTestCase {
 });
 
 assertThat(holder.get(), notNullValue());
-assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [2.0.0] that is lower than the minimum compatible version [" + Version.V_5_0_0.minimumCompatibilityVersion() + "]"));
+assertThat(holder.get().getMessage(), equalTo("Can't handle join request from a node with a version [2.0.0] that is lower than the minimum compatible version [" + Version.V_5_0_0_alpha1.minimumCompatibilityVersion() + "]"));
 }
 
 public void testJoinElectedMaster_incompatibleMinVersion() {
-ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_5_0_0);
+ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_5_0_0_alpha1);
 
-DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_5_0_0);
+DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_5_0_0_alpha1);
 assertThat(electMasterService.electMaster(Collections.singletonList(node)), sameInstance(node));
 node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0);
 assertThat("Can't join master because version 2.0.0 is lower than the minimum compatable version 5.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue());
@@ -76,7 +76,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }
 
 public void testOverrideDefaultIndexAnalyzer() {
-Version version = VersionUtils.randomVersionBetween(getRandom(), Version.V_5_0_0, Version.CURRENT);
+Version version = VersionUtils.randomVersionBetween(getRandom(), Version.V_5_0_0_alpha1, Version.CURRENT);
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 try {
 AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),

@@ -90,7 +90,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }
 
 public void testBackCompatOverrideDefaultIndexAnalyzer() {
-Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0));
+Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings),
 Collections.singletonMap("default_index", analyzerProvider("default_index")),

@@ -112,7 +112,7 @@ public class AnalysisServiceTests extends ESTestCase {
 }
 
 public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() {
-Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0));
+Version version = VersionUtils.randomVersionBetween(getRandom(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1));
 Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
 Map<String, AnalyzerProvider> analyzers = new HashMap<>();
 analyzers.put("default_index", analyzerProvider("default_index"));
@@ -39,7 +39,7 @@ public class DynamicTemplateTests extends ESTestCase {
 templateDef.put("random_param", "random_value");
 
 IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-() -> DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0));
+() -> DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1));
 assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage());
 
 // but no issues on 2.x for bw compat

@@ -54,7 +54,7 @@ public class DynamicTemplateTests extends ESTestCase {
 Map<String, Object> templateDef = new HashMap<>();
 templateDef.put("match_mapping_type", "string");
 templateDef.put("mapping", Collections.singletonMap("store", true));
-DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
+DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
 XContentBuilder builder = JsonXContent.contentBuilder();
 template.toXContent(builder, ToXContent.EMPTY_PARAMS);
 assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string());

@@ -64,7 +64,7 @@ public class DynamicTemplateTests extends ESTestCase {
 templateDef.put("match", "*name");
 templateDef.put("unmatch", "first_name");
 templateDef.put("mapping", Collections.singletonMap("store", true));
-template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
+template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
 builder = JsonXContent.contentBuilder();
 template.toXContent(builder, ToXContent.EMPTY_PARAMS);
 assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", builder.string());

@@ -74,7 +74,7 @@ public class DynamicTemplateTests extends ESTestCase {
 templateDef.put("path_match", "*name");
 templateDef.put("path_unmatch", "first_name");
 templateDef.put("mapping", Collections.singletonMap("store", true));
-template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
+template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
 builder = JsonXContent.contentBuilder();
 template.toXContent(builder, ToXContent.EMPTY_PARAMS);
 assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}",

@@ -85,7 +85,7 @@ public class DynamicTemplateTests extends ESTestCase {
 templateDef.put("match", "^a$");
 templateDef.put("match_pattern", "regex");
 templateDef.put("mapping", Collections.singletonMap("store", true));
-template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
+template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1);
 builder = JsonXContent.contentBuilder();
 template.toXContent(builder, ToXContent.EMPTY_PARAMS);
 assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", builder.string());
@@ -25,7 +25,14 @@ task buildRpm(type: Rpm) {
 packageGroup 'Application/Internet'
 prefix '/usr'
 packager 'Elasticsearch'
-release '1'
+if (version.contains('~')) {
+def tokenized = version.tokenize('~')
+version tokenized[0]
+release tokenized[1]
+} else {
+version version
+release '1'
+}
 arch NOARCH
 os LINUX
 // TODO ospackage doesn't support icon but we used to have one
@@ -18,6 +18,6 @@ integTest {
 cluster {
 numNodes = 2
 numBwcNodes = 1
-bwcVersion = "5.0.0-SNAPSHOT" // this is the same as the current version until we released the first RC
+bwcVersion = "5.0.0-alpha1-SNAPSHOT" // this is the same as the current version until we released the first RC
 }
 }
@@ -46,14 +46,14 @@ public class VersionUtilsTests extends ESTestCase {
 assertTrue(got.onOrBefore(Version.CURRENT));
 
 // sub range
-got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0);
+got = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha1);
 assertTrue(got.onOrAfter(Version.V_2_0_0));
-assertTrue(got.onOrBefore(Version.V_5_0_0));
+assertTrue(got.onOrBefore(Version.V_5_0_0_alpha1));
 
 // unbounded lower
-got = VersionUtils.randomVersionBetween(random(), null, Version.V_5_0_0);
+got = VersionUtils.randomVersionBetween(random(), null, Version.V_5_0_0_alpha1);
 assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
-assertTrue(got.onOrBefore(Version.V_5_0_0));
+assertTrue(got.onOrBefore(Version.V_5_0_0_alpha1));
 got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.allVersions().get(0));
 assertTrue(got.onOrAfter(VersionUtils.getFirstVersion()));
 assertTrue(got.onOrBefore(VersionUtils.allVersions().get(0)));

@@ -71,8 +71,8 @@ public class VersionUtilsTests extends ESTestCase {
 assertEquals(got, VersionUtils.getFirstVersion());
 got = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT);
 assertEquals(got, Version.CURRENT);
-got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_5_0_0);
-assertEquals(got, Version.V_5_0_0);
+got = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.V_5_0_0_alpha1);
+assertEquals(got, Version.V_5_0_0_alpha1);
 
 // implicit range of one
 got = VersionUtils.randomVersionBetween(random(), null, VersionUtils.getFirstVersion());