Make 2.0.0.beta1-SNAPSHOT the current version.

Today everything is tied to having the next version as the latest.
In order to work towards 2.0.0.beta1 we need to fix all usages of
2.0.0-SNAPSHOT to reflect the version we will release soon.
Usually we do this on the release branch, but to simplify things I want
to keep this on master for now and move to 2.1.0-SNAPSHOT on master once
we have created a 2.0 branch.

Closes #12148
Simon Willnauer 2015-07-09 12:52:02 +02:00
parent 26df78a90a
commit e0708813a9
59 changed files with 125 additions and 108 deletions
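
The change hinges on Elasticsearch's numeric version ids, so a short standalone sketch may help while reading the Version.java hunk below. This is not code from the commit; it assumes the MNNRRBB id layout and the "build < 50 means beta, 50-98 means RC, 99 means GA" convention implied by the constants (1070099, 2000099, 2000001) and by Version#number().

// Standalone sketch (not part of this commit) of the version-id layout that
// Version.java relies on: MNNRRBB, where BB < 50 marks a beta, 50-98 an RC,
// and 99 a GA release.
public class VersionIdSketch {
    static String describe(int id) {
        int major = id / 1000000;
        int minor = (id / 10000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        StringBuilder sb = new StringBuilder();
        sb.append(major).append('.').append(minor).append('.').append(revision);
        if (build < 50) {
            // 2.x betas are printed lowercase, mirroring Version#number() after this commit
            sb.append(major >= 2 ? ".beta" : ".Beta").append(build);
        } else if (build < 99) {
            sb.append(".RC").append(build - 50);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(describe(2000001)); // 2.0.0.beta1 -> the new CURRENT
        System.out.println(describe(2000099)); // 2.0.0       -> the eventual GA id
        System.out.println(describe(1070099)); // 1.7.0
    }
}

Running it prints 2.0.0.beta1, 2.0.0 and 1.7.0, which is why the new CURRENT id is 2000001 rather than 2000099.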

View File

@ -6,7 +6,7 @@
<parent>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-parent</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<groupId>org.elasticsearch</groupId>

View File

@ -244,10 +244,10 @@ public class Version {
public static final Version V_1_6_1 = new Version(V_1_6_1_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_0_ID = 1070099;
public static final Version V_1_7_0 = new Version(V_1_7_0_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_2_0_0_ID = 2000099;
public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_0_0_beta1_ID = 2000001;
public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final Version CURRENT = V_2_0_0;
public static final Version CURRENT = V_2_0_0_beta1;
static {
assert CURRENT.luceneVersion.equals(Lucene.VERSION) : "Version must be upgraded to [" + Lucene.VERSION + "] is still set to [" + CURRENT.luceneVersion + "]";
@ -259,8 +259,8 @@ public class Version {
public static Version fromId(int id) {
switch (id) {
case V_2_0_0_ID:
return V_2_0_0;
case V_2_0_0_beta1_ID:
return V_2_0_0_beta1;
case V_1_7_0_ID:
return V_1_7_0;
case V_1_6_1_ID:
@ -515,7 +515,7 @@ public class Version {
int build = 99;
if (parts.length == 4) {
String buildStr = parts[3];
if (buildStr.startsWith("Beta")) {
if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) {
build = Integer.parseInt(buildStr.substring(4));
}
if (buildStr.startsWith("RC")) {
@ -589,8 +589,13 @@ public class Version {
public String number() {
StringBuilder sb = new StringBuilder();
sb.append(major).append('.').append(minor).append('.').append(revision);
if (build < 50) {
sb.append(".Beta").append(build);
if (isBeta()) {
if (major >= 2) {
sb.append(".beta");
} else {
sb.append(".Beta");
}
sb.append(build);
} else if (build < 99) {
sb.append(".RC").append(build - 50);
}
@ -635,6 +640,10 @@ public class Version {
return id;
}
public boolean isBeta() {
return build < 50;
}
public static class Module extends AbstractModule {
private final Version version;

View File

@ -167,10 +167,10 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
Version version) throws TimestampParsingException {
try {
// no need for unix timestamp parsing in 2.x
FormatDateTimeFormatter formatter = version.onOrAfter(Version.V_2_0_0) ? dateTimeFormatter : EPOCH_MILLIS_PARSER;
FormatDateTimeFormatter formatter = version.onOrAfter(Version.V_2_0_0_beta1) ? dateTimeFormatter : EPOCH_MILLIS_PARSER;
return Long.toString(formatter.parser().parseMillis(timestampAsString));
} catch (RuntimeException e) {
if (version.before(Version.V_2_0_0)) {
if (version.before(Version.V_2_0_0_beta1)) {
try {
return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString));
} catch (RuntimeException e1) {

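The same substitution, V_2_0_0 replaced by V_2_0_0_beta1 in onOrAfter/before gates, repeats through the rest of the diff. The reason is id ordering: 2000001 (2.0.0.beta1) sorts before 2000099 (2.0.0), so a node or index on the beta would stop taking the 2.0 code paths if the gates stayed on V_2_0_0. A minimal sketch, assuming the comparisons are plain id comparisons (the method names mirror Version#onOrAfter and Version#before; the helper class is hypothetical):

public class VersionGateSketch {
    // Hypothetical stand-ins for Version#onOrAfter / Version#before, assuming
    // both simply compare the numeric ids shown in Version.java above.
    static boolean onOrAfter(int selfId, int otherId) { return selfId >= otherId; }
    static boolean before(int selfId, int otherId) { return selfId < otherId; }

    public static void main(String[] args) {
        int beta1 = 2000001, ga = 2000099;
        // An index created by 2.0.0.beta1 against the OLD gate (V_2_0_0):
        System.out.println(onOrAfter(beta1, ga));    // false -> 2.0 code path skipped
        // ...and against the NEW gate (V_2_0_0_beta1):
        System.out.println(onOrAfter(beta1, beta1)); // true  -> 2.0 code path taken
    }
}
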
View File

@ -124,7 +124,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
*/
private IndexMetaData upgradeLegacyRoutingSettings(IndexMetaData indexMetaData) throws Exception {
if (indexMetaData.settings().get(IndexMetaData.SETTING_LEGACY_ROUTING_HASH_FUNCTION) == null
&& indexMetaData.getCreationVersion().before(Version.V_2_0_0)) {
&& indexMetaData.getCreationVersion().before(Version.V_2_0_0_beta1)) {
// these settings need an upgrade
Settings indexSettings = Settings.builder().put(indexMetaData.settings())
.put(IndexMetaData.SETTING_LEGACY_ROUTING_HASH_FUNCTION, pre20HashFunction)
@ -134,7 +134,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
.version(indexMetaData.version())
.settings(indexSettings)
.build();
} else if (indexMetaData.getCreationVersion().onOrAfter(Version.V_2_0_0)) {
} else if (indexMetaData.getCreationVersion().onOrAfter(Version.V_2_0_0_beta1)) {
if (indexMetaData.getSettings().get(IndexMetaData.SETTING_LEGACY_ROUTING_HASH_FUNCTION) != null
|| indexMetaData.getSettings().get(IndexMetaData.SETTING_LEGACY_ROUTING_USE_TYPE) != null) {
throw new IllegalStateException("Indices created on or after 2.0 should NOT contain [" + IndexMetaData.SETTING_LEGACY_ROUTING_HASH_FUNCTION
@ -188,7 +188,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
* missing units.
*/
private IndexMetaData addDefaultUnitsIfNeeded(IndexMetaData indexMetaData) {
if (indexMetaData.getCreationVersion().before(Version.V_2_0_0)) {
if (indexMetaData.getCreationVersion().before(Version.V_2_0_0_beta1)) {
// TODO: can we somehow only do this *once* for a pre-2.0 index? Maybe we could stuff a "fake marker setting" here? Seems hackish...
// Created lazily if we find any settings that are missing units:
Settings settings = indexMetaData.settings();

View File

@ -396,7 +396,7 @@ public class MetaDataMappingService extends AbstractComponent {
// For example in MapperService we can't distinguish between a create index api call
// and a put mapping api call, so we don't know which type did exist before.
// Also the order of the mappings may be backwards.
if (Version.indexCreated(indexService.getIndexSettings()).onOrAfter(Version.V_2_0_0) && newMapper.parentFieldMapper().active()) {
if (Version.indexCreated(indexService.getIndexSettings()).onOrAfter(Version.V_2_0_0_beta1) && newMapper.parentFieldMapper().active()) {
IndexMetaData indexMetaData = currentState.metaData().index(index);
for (ObjectCursor<MappingMetaData> mapping : indexMetaData.mappings().values()) {
if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) {

View File

@ -248,7 +248,7 @@ public class OperationRouting extends AbstractComponent {
} else {
hash = hash(hashFunction, routing);
}
if (createdVersion.onOrAfter(Version.V_2_0_0)) {
if (createdVersion.onOrAfter(Version.V_2_0_0_beta1)) {
return MathUtils.mod(hash, indexMetaData.numberOfShards());
} else {
return Math.abs(hash % indexMetaData.numberOfShards());

View File

@ -49,7 +49,6 @@ import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentTypeListener;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
@ -95,7 +94,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
@Override
public AtomicParentChildFieldData load(LeafReaderContext context) {
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0)) {
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)) {
final LeafReader reader = context.reader();
final NavigableSet<String> parentTypes;
synchronized (lock) {

View File

@ -256,7 +256,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
public static void checkNoRemainingFields(Map<String, Object> fieldNodeMap, Version indexVersionCreated, String message) {
if (!fieldNodeMap.isEmpty()) {
if (indexVersionCreated.onOrAfter(Version.V_2_0_0)) {
if (indexVersionCreated.onOrAfter(Version.V_2_0_0_beta1)) {
throw new MapperParsingException(message + getRemainingFields(fieldNodeMap));
} else {
logger.debug(message + "{}", getRemainingFields(fieldNodeMap));

View File

@ -224,7 +224,7 @@ public abstract class FieldMapper extends Mapper {
}
protected String buildIndexName(BuilderContext context) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
return buildFullName(context);
}
String actualIndexName = indexName == null ? name : indexName;
@ -232,7 +232,7 @@ public abstract class FieldMapper extends Mapper {
}
protected String buildIndexNameClean(BuilderContext context) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
return buildFullName(context);
}
return indexName == null ? name : indexName;
@ -253,7 +253,7 @@ public abstract class FieldMapper extends Mapper {
fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings));
}
boolean defaultDocValues = false; // pre 2.0
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0)) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
defaultDocValues = fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
}
// backcompat for "fielddata: format: docvalues" for now...
@ -279,7 +279,7 @@ public abstract class FieldMapper extends Mapper {
protected FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName);
assert indexSettings != null;
this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // the reference ctor freezes the field type
defaultFieldType.freeze();
this.defaultFieldType = defaultFieldType;

View File

@ -272,7 +272,7 @@ public class MapperService extends AbstractIndexComponent {
if (mapper.type().contains(",")) {
throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it");
}
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0) && mapper.type().equals(mapper.parentFieldMapper().type())) {
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1) && mapper.type().equals(mapper.parentFieldMapper().type())) {
throw new IllegalArgumentException("The [_parent.type] option can't point to the same type");
}
if (mapper.type().contains(".") && !PercolatorService.TYPE_NAME.equals(mapper.type())) {

View File

@ -67,7 +67,7 @@ public final class Mapping implements ToXContent {
this.metadataMappers = metadataMappers;
ImmutableMap.Builder<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> builder = ImmutableMap.builder();
for (MetadataFieldMapper metadataMapper : metadataMappers) {
if (indexCreated.before(Version.V_2_0_0) && LEGACY_INCLUDE_IN_OBJECT.contains(metadataMapper.name())) {
if (indexCreated.before(Version.V_2_0_0_beta1) && LEGACY_INCLUDE_IN_OBJECT.contains(metadataMapper.name())) {
root.putMapper(metadataMapper);
}
builder.put(metadataMapper.getClass(), metadataMapper);

View File

@ -79,7 +79,7 @@ public class BinaryFieldMapper extends FieldMapper {
@Override
public BinaryFieldMapper build(BuilderContext context) {
setupFieldType(context);
((BinaryFieldType)fieldType).setTryUncompressing(context.indexCreatedVersion().before(Version.V_2_0_0));
((BinaryFieldType)fieldType).setTryUncompressing(context.indexCreatedVersion().before(Version.V_2_0_0_beta1));
return new BinaryFieldMapper(name, fieldType, defaultFieldType,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
@ -93,7 +93,7 @@ public class BinaryFieldMapper extends FieldMapper {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = entry.getKey();
if (parserContext.indexVersionCreated().before(Version.V_2_0_0) &&
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1) &&
(parserContext.parseFieldMatcher().match(fieldName, COMPRESS) || parserContext.parseFieldMatcher().match(fieldName, COMPRESS_THRESHOLD))) {
iterator.remove();
}

View File

@ -171,7 +171,7 @@ public class CompletionFieldMapper extends FieldMapper {
continue;
}
if (Fields.ANALYZER.equals(fieldName) || // index_analyzer is for backcompat, remove for v3.0
fieldName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
fieldName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
indexAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
iterator.remove();

View File

@ -125,7 +125,7 @@ public class DateFieldMapper extends NumberFieldMapper {
}
protected void setupFieldType(BuilderContext context) {
if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0) &&
if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0_beta1) &&
!fieldType().dateTimeFormatter().format().contains("epoch_")) {
String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ? "epoch_second" : "epoch_millis";
fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + fieldType().dateTimeFormatter().format()));
@ -183,7 +183,7 @@ public class DateFieldMapper extends NumberFieldMapper {
}
}
if (!configuredFormat) {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER);
} else {
builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER_BEFORE_2_0);

View File

@ -83,7 +83,7 @@ public class Murmur3FieldMapper extends LongFieldMapper {
Builder builder = murmur3Field(name);
// tweaking these settings is no longer allowed, the entire purpose of murmur3 fields is to store a hash
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
if (node.get("doc_values") != null) {
throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]");
}

View File

@ -69,7 +69,7 @@ public class TypeParsers {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
pathType = parsePathType(name, fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("fields")) {
@ -188,7 +188,7 @@ public class TypeParsers {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());
final Object propNode = entry.getValue();
if (propName.equals("index_name") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (propName.equals("index_name") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.indexName(propNode.toString());
iterator.remove();
} else if (propName.equals("store")) {
@ -252,7 +252,7 @@ public class TypeParsers {
builder.indexOptions(nodeIndexOptionValue(propNode));
iterator.remove();
} else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0
propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
if (analyzer == null) {
@ -270,10 +270,10 @@ public class TypeParsers {
} else if (propName.equals("include_in_all")) {
builder.includeInAll(nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (propName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
// ignore for old indexes
iterator.remove();
} else if (propName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (propName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
// ignore for old indexes
iterator.remove();
} else if (propName.equals("similarity")) {
@ -301,7 +301,7 @@ public class TypeParsers {
}
public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.multiFieldPathType(parsePathType(name, propNode.toString()));
return true;
} else if (propName.equals("fields")) {

View File

@ -220,7 +220,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.multiFieldPathType(parsePathType(name, fieldNode.toString()));
iterator.remove();
} else if (fieldName.equals("lat_lon")) {

View File

@ -120,11 +120,11 @@ public class GeoShapeFieldMapper extends FieldMapper {
public GeoShapeFieldMapper build(BuilderContext context) {
GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType;
if (geoShapeFieldType.tree.equals("quadtree") && context.indexCreatedVersion().before(Version.V_2_0_0)) {
if (geoShapeFieldType.tree.equals("quadtree") && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
geoShapeFieldType.setTree("legacyquadtree");
}
if (context.indexCreatedVersion().before(Version.V_2_0_0) ||
if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1) ||
(geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0)) {
geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT);
}

View File

@ -143,7 +143,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
iterator.remove();
} else if (fieldName.equals("auto_boost") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (fieldName.equals("auto_boost") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
// Old 1.x setting which is now ignored
iterator.remove();
}

View File

@ -113,7 +113,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
}
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
parseField(builder, builder.name, node, parserContext);
}

View File

@ -115,7 +115,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
throw new MapperParsingException(NAME + " is not configurable");
}
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
@ -242,7 +242,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
return existing.clone();
}
MappedFieldType fieldType = Defaults.FIELD_TYPE.clone();
boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) {
fieldType.setTokenized(false);
}

View File

@ -102,7 +102,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
parseField(builder, builder.name, node, parserContext);
}

View File

@ -103,7 +103,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
}
setupFieldType(context);
fieldType.setHasDocValues(context.indexCreatedVersion().onOrAfter(Version.V_2_0_0));
fieldType.setHasDocValues(context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1));
return new ParentFieldMapper(fieldType, type, context.indexSettings());
}
}
@ -119,7 +119,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (fieldName.equals("postings_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
// ignore before 2.0, reject on and after 2.0
iterator.remove();
} else if (fieldName.equals("fielddata")) {
@ -235,7 +235,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
static MappedFieldType setupDocValues(Settings indexSettings, MappedFieldType fieldType) {
fieldType = fieldType.clone();
fieldType.setHasDocValues(Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0));
fieldType.setHasDocValues(Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1));
return fieldType;
}

View File

@ -102,7 +102,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
parseField(builder, builder.name, node, parserContext);
}
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
@ -112,7 +112,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
if (fieldName.equals("required")) {
builder.required(nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.path(fieldNode.toString());
iterator.remove();
}

View File

@ -95,7 +95,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
iterator.remove();
} else if (fieldName.equals("store") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (fieldName.equals("store") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.store(parseStore(fieldName, fieldNode.toString()));
iterator.remove();
}

View File

@ -155,12 +155,12 @@ public class SourceFieldMapper extends MetadataFieldMapper {
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
iterator.remove();
} else if (fieldName.equals("compress") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (fieldName.equals("compress") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
if (fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
}
iterator.remove();
} else if (fieldName.equals("compress_threshold") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (fieldName.equals("compress_threshold") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
if (fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());

View File

@ -142,7 +142,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
@Override
public TimestampFieldMapper build(BuilderContext context) {
if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0)) {
if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
fieldType.setStored(false);
}
@ -158,7 +158,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
private static FormatDateTimeFormatter getDateTimeFormatter(Settings indexSettings) {
Version indexCreated = Version.indexCreated(indexSettings);
if (indexCreated.onOrAfter(Version.V_2_0_0)) {
if (indexCreated.onOrAfter(Version.V_2_0_0_beta1)) {
return Defaults.DATE_TIME_FORMATTER;
} else {
return Defaults.DATE_TIME_FORMATTER_BEFORE_2_0;
@ -169,7 +169,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
parseField(builder, builder.name, node, parserContext);
}
boolean defaultSet = false;
@ -182,7 +182,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
builder.enabled(enabledState);
iterator.remove();
} else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
} else if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.path(fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("format")) {
@ -246,7 +246,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
if (existing != null) {
return existing;
}
return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE;
return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0_beta1) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE;
}
private EnabledAttributeMapper enabledState;

View File

@ -94,7 +94,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
throw new MapperParsingException(NAME + " is not configurable");
}
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));

View File

@ -87,7 +87,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
@Override
public UidFieldMapper build(BuilderContext context) {
setupFieldType(context);
fieldType.setHasDocValues(context.indexCreatedVersion().before(Version.V_2_0_0));
fieldType.setHasDocValues(context.indexCreatedVersion().before(Version.V_2_0_0_beta1));
return new UidFieldMapper(fieldType, defaultFieldType, context.indexSettings());
}
}
@ -95,7 +95,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
throw new MapperParsingException(NAME + " is not configurable");
}
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));

View File

@ -79,7 +79,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
if (fieldName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (fieldName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
// ignore in 1.x, reject in 2.x
iterator.remove();
}

View File

@ -236,7 +236,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
}
protected static boolean parseObjectProperties(String name, String fieldName, Object fieldNode, ParserContext parserContext, ObjectMapper.Builder builder) {
if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.pathType(parsePathType(name, fieldNode.toString()));
return true;
}

View File

@ -178,7 +178,7 @@ public class HasChildQueryParser implements QueryParser {
final Query query;
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
} else {
// TODO: use the query API

View File

@ -202,7 +202,7 @@ public class HasParentQueryParser implements QueryParser {
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
Filter childrenFilter = new QueryWrapperFilter(Queries.not(parentFilter));
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
ScoreType scoreMode = score ? ScoreType.MAX : ScoreType.NONE;
return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery, 0, Integer.MAX_VALUE);
} else {

View File

@ -594,7 +594,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
* @return true if legacy version should be used, false otherwise
*/
private boolean legacyMetaData(Version version) {
return version.before(Version.V_2_0_0);
return version.before(Version.V_2_0_0_beta1);
}
/**

View File

@ -162,7 +162,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
MetaData metaDataIn = repository.readSnapshotMetaData(snapshotId, snapshot, filteredIndices);
final MetaData metaData;
if (snapshot.version().before(Version.V_2_0_0)) {
if (snapshot.version().before(Version.V_2_0_0_beta1)) {
// ES 2.0 now requires units for all time and byte-sized settings, so we add the default unit if it's missing in this snapshot:
metaData = MetaData.addDefaultUnitsIfNeeded(logger, metaDataIn);
} else {

View File

@ -129,18 +129,30 @@ public class VersionTests extends ElasticsearchTestCase {
}
public void testMinCompatVersion() {
assertThat(Version.V_2_0_0.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0));
assertThat(Version.V_2_0_0_beta1.minimumCompatibilityVersion(), equalTo(Version.V_2_0_0_beta1));
assertThat(Version.V_1_3_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
assertThat(Version.V_1_2_0.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
assertThat(Version.V_1_2_3.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0));
assertThat(Version.V_1_0_0_RC2.minimumCompatibilityVersion(), equalTo(Version.V_1_0_0_RC2));
}
public void testToString() {
// with 2.0.beta we lowercase
assertEquals("2.0.0.beta1", Version.V_2_0_0_beta1.number());
assertEquals("1.4.0.Beta1", Version.V_1_4_0_Beta1.number());
assertEquals("1.4.0", Version.V_1_4_0.number());
}
public void testIsBeta() {
assertTrue(Version.V_2_0_0_beta1.isBeta());
assertTrue(Version.V_1_4_0_Beta1.isBeta());
assertFalse(Version.V_1_4_0.isBeta());
}
public void testParseVersion() {
final int iters = scaledRandomIntBetween(100, 1000);
for (int i = 0; i < iters; i++) {
Version version = randomVersion(random());
String stringVersion = version.toString();
if (version.snapshot() == false && random().nextBoolean()) {
version = new Version(version.id, true, version.luceneVersion);
}

View File

@ -64,7 +64,7 @@ public class RoutingBackwardCompatibilityTests extends ElasticsearchTestCase {
RoutingTable routingTable = RoutingTable.builder().addAsNew(indexMetaData).build();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();
final int shardId = operationRouting.indexShards(clusterState, index, type, id, routing).shardId().getId();
if (version.before(Version.V_2_0_0)) {
if (version.before(Version.V_2_0_0_beta1)) {
assertEquals(pre20ExpectedShardId, shardId);
} else {
assertEquals(currentExpectedShard, shardId);

View File

@ -146,7 +146,7 @@ public class MockDiskUsagesTests extends ElasticsearchIntegrationTest {
usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes());
paths[0] = path;
FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths);
return new NodeStats(new DiscoveryNode(nodeName, null, Version.V_2_0_0),
return new NodeStats(new DiscoveryNode(nodeName, null, Version.V_2_0_0_beta1),
System.currentTimeMillis(),
null, null, null, null, null,
fsInfo,

View File

@ -51,11 +51,11 @@ public class CodecTests extends ElasticsearchSingleNodeTest {
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
try {
parser.parse(mapping);
if (v.onOrAfter(Version.V_2_0_0)) {
if (v.onOrAfter(Version.V_2_0_0_beta1)) {
fail("Elasticsearch 2.0 should not support custom postings formats");
}
} catch (MapperParsingException e) {
if (v.before(Version.V_2_0_0)) {
if (v.before(Version.V_2_0_0_beta1)) {
// Elasticsearch 1.x should ignore custom postings formats
throw e;
}
@ -74,11 +74,11 @@ public class CodecTests extends ElasticsearchSingleNodeTest {
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
try {
parser.parse(mapping);
if (v.onOrAfter(Version.V_2_0_0)) {
if (v.onOrAfter(Version.V_2_0_0_beta1)) {
fail("Elasticsearch 2.0 should not support custom postings formats");
}
} catch (MapperParsingException e) {
if (v.before(Version.V_2_0_0)) {
if (v.before(Version.V_2_0_0_beta1)) {
// Elasticsearch 1.x should ignore custom postings formats
throw e;
}

View File

@ -19,7 +19,6 @@
package org.elasticsearch.discovery.zen;
import com.google.common.collect.Iterables;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
@ -48,9 +47,7 @@ import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.IOException;
import java.lang.ref.Reference;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
@ -228,7 +225,7 @@ public class ZenDiscoveryTests extends ElasticsearchIntegrationTest {
.put("discovery.type", "zen") // <-- To override the local setting if set externally
.put("node.mode", "local") // <-- force local transport so we can fake a network address
.build();
String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0);
String nodeName = internalCluster().startNode(nodeSettings, Version.V_2_0_0_beta1);
ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName);
DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_1_6_0);
@ -250,9 +247,9 @@ public class ZenDiscoveryTests extends ElasticsearchIntegrationTest {
@Test
public void testJoinElectedMaster_incompatibleMinVersion() {
ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_2_0_0);
ElectMasterService electMasterService = new ElectMasterService(Settings.EMPTY, Version.V_2_0_0_beta1);
DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0);
DiscoveryNode node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_2_0_0_beta1);
assertThat(electMasterService.electMaster(Collections.singletonList(node)), sameInstance(node));
node = new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.V_1_6_0);
assertThat("Can't join master because version 1.6.0 is lower than the minimum compatible version 2.0.0 can support", electMasterService.electMaster(Collections.singletonList(node)), nullValue());

View File

@ -1733,7 +1733,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
for (Path indexFile : indexes.subList(0, scaledRandomIntBetween(1, indexes.size() / 2))) {
final String indexName = indexFile.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT);
Version version = Version.fromString(indexName.replace("index-", ""));
if (version.onOrAfter(Version.V_2_0_0)) {
if (version.onOrAfter(Version.V_2_0_0_beta1)) {
continue;
}
Path unzipDir = createTempDir();

View File

@ -50,7 +50,7 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.Version.V_1_5_0;
import static org.elasticsearch.Version.V_2_0_0;
import static org.elasticsearch.Version.V_2_0_0_beta1;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
@ -96,17 +96,17 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
@Test
public void testDefaultValues() throws Exception {
for (Version version : Arrays.asList(V_1_5_0, V_2_0_0, randomVersion(random()))) {
for (Version version : Arrays.asList(V_1_5_0, V_2_0_0_beta1, randomVersion(random()))) {
for (String mapping : Arrays.asList(
XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(),
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().string())) {
DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse(mapping);
assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled));
assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0)));
assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1)));
assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions()));
assertThat(docMapper.timestampFieldMapper().path(), equalTo(TimestampFieldMapper.Defaults.PATH));
assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0)));
String expectedFormat = version.onOrAfter(Version.V_2_0_0) ? TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT :
assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1)));
String expectedFormat = version.onOrAfter(Version.V_2_0_0_beta1) ? TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT :
TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER_BEFORE_2_0.format();
assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(expectedFormat));
assertAcked(client().admin().indices().prepareDelete("test").execute().get());

View File

@ -1168,7 +1168,7 @@ public class TranslogTests extends ElasticsearchTestCase {
for (Path indexFile : indexes) {
final String indexName = indexFile.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT);
Version version = Version.fromString(indexName.replace("index-", ""));
if (version.onOrAfter(Version.V_2_0_0)) {
if (version.onOrAfter(Version.V_2_0_0_beta1)) {
continue;
}
Path unzipDir = createTempDir();
@ -1195,7 +1195,7 @@ public class TranslogTests extends ElasticsearchTestCase {
final long generation = parseLegacyTranslogFile(tlogFiles[0]);
assertTrue(generation >= 1);
logger.debug("upgrading index {} file: {} size: {}", indexName, tlogFiles[0].getFileName(), size);
logger.info("upgrading index {} file: {} size: {}", indexName, tlogFiles[0].getFileName(), size);
TranslogConfig upgradeConfig = new TranslogConfig(config.getShardId(), translog, config.getIndexSettings(), config.getDurabilty(), config.getBigArrays(), config.getThreadPool());
upgradeConfig.setTranslogGeneration(new Translog.TranslogGeneration(null, generation));
Translog.upgradeLegacyTranslog(logger, upgradeConfig);
@ -1203,7 +1203,7 @@ public class TranslogTests extends ElasticsearchTestCase {
assertEquals(generation + 1, upgraded.getGeneration().translogFileGeneration);
assertEquals(upgraded.getRecoveredReaders().size(), 1);
final long headerSize;
if (version.before(Version.V_1_4_0)) {
if (version.before(Version.V_1_4_0_Beta1)) {
assertTrue(upgraded.getRecoveredReaders().get(0).getClass().toString(), upgraded.getRecoveredReaders().get(0).getClass() == LegacyTranslogReader.class);
headerSize = 0;
} else {

View File

@ -513,7 +513,7 @@ public abstract class ElasticsearchTestCase extends LuceneTestCase {
/** Return consistent index settings for the provided index version. */
public static Settings.Builder settings(Version version) {
Settings.Builder builder = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version);
if (version.before(Version.V_2_0_0)) {
if (version.before(Version.V_2_0_0_beta1)) {
builder.put(IndexMetaData.SETTING_LEGACY_ROUTING_HASH_FUNCTION, DjbHashFunction.class);
}
return builder;

View File

@ -2,7 +2,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-dev-tools</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
<name>Elasticsearch Build Resources</name>
<parent>
<groupId>org.sonatype.oss</groupId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-analysis-icu</artifactId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-analysis-kuromoji</artifactId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-analysis-phonetic</artifactId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-analysis-smartcn</artifactId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-analysis-stempel</artifactId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-cloud-aws</artifactId>

View File

@ -18,7 +18,7 @@ governing permissions and limitations under the License. -->
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-cloud-azure</artifactId>

View File

@ -18,7 +18,7 @@ governing permissions and limitations under the License. -->
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-cloud-gce</artifactId>

View File

@ -18,7 +18,7 @@ governing permissions and limitations under the License. -->
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-delete-by-query</artifactId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-lang-javascript</artifactId>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<artifactId>elasticsearch-lang-python</artifactId>

View File

@ -7,7 +7,7 @@
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-plugin</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Elasticsearch Plugin POM</name>
<inceptionYear>2009</inceptionYear>
@ -15,7 +15,7 @@
<parent>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-parent</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
</parent>
<properties>

View File

@ -6,7 +6,7 @@
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-parent</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Elasticsearch Parent POM</name>
<description>Elasticsearch Parent POM</description>
@ -35,7 +35,7 @@
<properties>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<!-- elasticsearch stack -->
<elasticsearch.version>2.0.0-SNAPSHOT</elasticsearch.version>
<elasticsearch.version>${version}</elasticsearch.version>
<jvm.executable>${java.home}${file.separator}bin${file.separator}java</jvm.executable>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>

View File

@ -2,7 +2,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-rest-api-spec</artifactId>
<version>2.0.0-SNAPSHOT</version>
<version>2.0.0.beta1-SNAPSHOT</version>
<name>Elasticsearch Rest API Spec</name>
<parent>
<groupId>org.sonatype.oss</groupId>