diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 57b244ecefc..f87783e142d 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.index; import com.google.common.base.Charsets; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.action.*; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.client.Requests; @@ -562,8 +563,10 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do routing(metaData.resolveIndexRouting(routing, index)); // resolve timestamp if provided externally if (timestamp != null) { + Version version = Version.indexCreated(metaData.getIndices().get(concreteIndex).settings()); timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, - mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER); + mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER, + version); } // extract values if needed if (mappingMd != null) { @@ -586,7 +589,8 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do if (parseContext.shouldParseTimestamp()) { timestamp = parseContext.timestamp(); if (timestamp != null) { - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter()); + Version version = Version.indexCreated(metaData.getIndices().get(concreteIndex).settings()); + timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, mappingMd.timestamp().dateTimeFormatter(), version); } } } catch (MapperParsingException e) { @@ -638,7 +642,8 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do if (defaultTimestamp.equals(TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP)) { timestamp = Long.toString(System.currentTimeMillis()); } else { - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter()); + Version version = Version.indexCreated(metaData.getIndices().get(concreteIndex).settings()); + timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter(), version); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 468029a2b51..741d173d8f9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.Version; import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Nullable; @@ -160,10 +161,22 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> { public static class Timestamp { - public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException { + private static final FormatDateTimeFormatter EPOCH_MILLIS_PARSER = Joda.forPattern("epoch_millis"); + + public
static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter, + Version version) throws TimestampParsingException { try { - return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString)); + // no need for unix timestamp parsing in 2.x + FormatDateTimeFormatter formatter = version.onOrAfter(Version.V_2_0_0) ? dateTimeFormatter : EPOCH_MILLIS_PARSER; + return Long.toString(formatter.parser().parseMillis(timestampAsString)); } catch (RuntimeException e) { + if (version.before(Version.V_2_0_0)) { + try { + return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString)); + } catch (RuntimeException e1) { + throw new TimestampParsingException(timestampAsString, e1); + } + } throw new TimestampParsingException(timestampAsString, e); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index c7a290ed3bd..18a9af639b5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -30,6 +30,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.ToStringUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; @@ -46,18 +47,17 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.internal.SearchContext; import org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; +import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; @@ -70,7 +70,7 @@ public class DateFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "date"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime", Locale.ROOT); + public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime||epoch_millis", Locale.ROOT); public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; public static final DateFieldType FIELD_TYPE = new DateFieldType(); @@ -126,6 +126,14 @@ public class DateFieldMapper extends NumberFieldMapper { protected void setupFieldType(BuilderContext context) { FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; + // TODO MOVE ME OUTSIDE OF THIS SPACE? 
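// [Editor's aside -- not part of the patch] The block below is what the TODO refers to: for indices created before 2.0 the builder silently prepends an epoch parser to the configured format, so e.g. "dateOptionalTime" effectively becomes "epoch_millis||dateOptionalTime" (or "epoch_second||..." when the field's numeric resolution is seconds), preserving the old implicit unix-timestamp parsing for old indices.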
+ if (Version.indexCreated(context.indexSettings()).before(Version.V_2_0_0)) { + boolean includesEpochFormatter = dateTimeFormatter.format().contains("epoch_"); + if (!includesEpochFormatter) { + String format = fieldType().timeUnit().equals(TimeUnit.SECONDS) ? "epoch_second" : "epoch_millis"; + fieldType().setDateTimeFormatter(Joda.forPattern(format + "||" + dateTimeFormatter.format())); + } + } if (!locale.equals(dateTimeFormatter.locale())) { fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); } @@ -308,15 +316,7 @@ public class DateFieldMapper extends NumberFieldMapper { } protected long parseStringValue(String value) { - try { - return dateTimeFormatter().parser().parseMillis(value); - } catch (RuntimeException e) { - try { - return timeUnit().toMillis(Long.parseLong(value)); - } catch (NumberFormatException e1) { - throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter().format() + "], and timestamp number with locale [" + dateTimeFormatter().locale() + "]", e); - } - } + return dateTimeFormatter().parser().parseMillis(value); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index e4f5a8d7a03..bb411028a76 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -147,6 +147,9 @@ public class RootObjectMapper extends ObjectMapper { List<FormatDateTimeFormatter> dateTimeFormatters = newArrayList(); if (fieldNode instanceof List) { for (Object node1 : (List) fieldNode) { + if (node1.toString().startsWith("epoch_")) { + throw new MapperParsingException("Epoch ["+ node1.toString() +"] is not supported as dynamic date format"); + } dateTimeFormatters.add(parseDateTimeFormatter(node1)); } } else if ("none".equals(fieldNode.toString())) { diff --git a/core/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java b/core/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java index 8d613fb6acf..59b38b1cf3b 100644 --- a/core/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java +++ b/core/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java @@ -181,7 +181,7 @@ public class SimpleCountTests extends ElasticsearchIntegrationTest { } @Test - public void testThatNonEpochDatesCanBeSearch() throws Exception { + public void testThatNonEpochDatesCanBeSearched() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1") @@ -201,16 +201,9 @@ public class SimpleCountTests extends ElasticsearchIntegrationTest { .endObject(); assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true)); - // this is a timestamp in 2015 and should not be returned in counting when filtering by year - document = jsonBuilder() - .startObject() - .field("date_field", "1433236702") - .endObject(); - assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true)); - refresh(); - assertHitCount(client().prepareCount("test").get(), 3); + assertHitCount(client().prepareCount("test").get(), 2); CountResponse countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from("2015010100").to("2015123123")).get();
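// [Editor's aside -- an illustrative sketch, not part of the patch] The SimpleCountTests change above drops the "1433236702" document because 2.x no longer falls back to unix-time parsing. Using the Joda helper that appears in this patch (values are made up):
//   FormatDateTimeFormatter strict = Joda.forPattern("dateOptionalTime", Locale.ROOT);
//   strict.parser().parseMillis("2015-06-23");     // fine, parsed as an ISO date
//   strict.parser().parseMillis("1433236702");     // 2.x: throws, there is no epoch fallback any more
//   FormatDateTimeFormatter lenient = Joda.forPattern("dateOptionalTime||epoch_millis", Locale.ROOT);
//   lenient.parser().parseMillis("1433236702000"); // fine, matched by the explicit epoch_millis parser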
assertHitCount(countResponse, 1); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java new file mode 100644 index 00000000000..05dd79de1ea --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/DateBackwardsCompatibilityTests.java @@ -0,0 +1,205 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.date; + +import com.google.common.collect.Lists; +import org.elasticsearch.Version; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.ElasticsearchSingleNodeTest; +import org.junit.Before; + +import java.util.List; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.test.VersionUtils.randomVersionBetween; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; +import static org.hamcrest.Matchers.is; + +/** + * Test class to check for all the conditions defined in + * https://github.com/elastic/elasticsearch/issues/10971 + */ +public class DateBackwardsCompatibilityTests extends ElasticsearchSingleNodeTest { + + private String index = "testindex"; + private String type = "testtype"; + private Version randomVersionBelow2x; + + @Before + public void setup() throws Exception { + randomVersionBelow2x = randomVersionBetween(getRandom(), Version.V_0_90_0, Version.V_1_6_1); + } + + public void testThatPre2xIndicesNumbersAreTreatedAsEpochs() throws Exception { + index = createPre2xIndexAndMapping(); + long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 + XContentBuilder document = jsonBuilder().startObject().field("date_field", dateInMillis).endObject(); + index(document); + + // search for date in time range + QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24"); + SearchResponse response = client().prepareSearch(index).setQuery(query).get(); + assertHitCount(response, 1); + } + + public void testThatPre2xFailedStringParsingLeadsToEpochParsing() throws Exception { + 
index = createPre2xIndexAndMapping(); + long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 + String date = String.valueOf(dateInMillis); + XContentBuilder document = jsonBuilder().startObject().field("date_field", date).endObject(); + index(document); + + // search for date in time range + QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24"); + SearchResponse response = client().prepareSearch(index).setQuery(query).get(); + assertHitCount(response, 1); + } + + public void testThatPre2xSupportsUnixTimestampsInAnyDateFormat() throws Exception { + long dateInMillis = 1435073872l * 1000; // Tue Jun 23 17:37:52 CEST 2015 + List<String> dateFormats = Lists.newArrayList("dateOptionalTime", "weekDate", "tTime", "ordinalDate", "hourMinuteSecond", "hourMinute"); + + for (String format : dateFormats) { + XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") + .startObject("date_field").field("type", "date").field("format", format).endObject() + .endObject().endObject(); + + index = createIndex(randomVersionBelow2x, mapping); + + XContentBuilder document = XContentFactory.jsonBuilder() + .startObject() + .field("date_field", String.valueOf(dateInMillis)) + .endObject(); + index(document); + + // indexing as regular timestamp should work as well + document = XContentFactory.jsonBuilder() + .startObject() + .field("date_field", dateInMillis) + .endObject(); + index(document); + + client().admin().indices().prepareDelete(index).get(); + } + } + + public void testThatPre2xIndicesNumbersAreTreatedAsTimestamps() throws Exception { + // looks like a unix time stamp but is meant as 2015-06-23T01:00:00.000 - see the specified date format + long date = 2015062301000l; + + XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") + .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject() + .endObject().endObject(); + index = createIndex(randomVersionBelow2x, mapping); + + XContentBuilder document = XContentFactory.jsonBuilder() + .startObject() + .field("date_field", randomBoolean() ?
String.valueOf(date) : date) + .endObject(); + index(document); + + // no results in expected time range + QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime"); + SearchResponse response = client().prepareSearch(index).setQuery(query).get(); + assertNoSearchHits(response); + + // result in unix timestamp range + QueryBuilder timestampQuery = QueryBuilders.rangeQuery("date_field").from(2015062300000L).to(2015062302000L); + assertHitCount(client().prepareSearch(index).setQuery(timestampQuery).get(), 1); + + // result should also work with regular specified dates + QueryBuilder regularTimeQuery = QueryBuilders.rangeQuery("date_field").from("2033-11-08").to("2033-11-09").format("dateOptionalTime"); + assertHitCount(client().prepareSearch(index).setQuery(regularTimeQuery).get(), 1); + } + + public void testThatPost2xIndicesNumbersAreTreatedAsStrings() throws Exception { + // looks like a unix time stamp but is meant as 2015-06-23T01:00:00.000 - see the specified date format + long date = 2015062301000l; + + XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") + .startObject("date_field").field("type", "date").field("format","yyyyMMddHHSSS").endObject() + .endObject().endObject(); + index = createIndex(Version.CURRENT, mapping); + + XContentBuilder document = XContentFactory.jsonBuilder() + .startObject() + .field("date_field", String.valueOf(date)) + .endObject(); + index(document); + + document = XContentFactory.jsonBuilder() + .startObject() + .field("date_field", date) + .endObject(); + index(document); + + // search for date in time range + QueryBuilder query = QueryBuilders.rangeQuery("date_field").from("2015-06-23").to("2015-06-24").format("dateOptionalTime"); + SearchResponse response = client().prepareSearch(index).setQuery(query).get(); + assertHitCount(response, 2); + } + + public void testDynamicDateDetectionIn2xDoesNotSupportEpochs() throws Exception { + try { + XContentBuilder mapping = jsonBuilder().startObject() + .startArray("dynamic_date_formats").value("dateOptionalTime").value("epoch_seconds").endArray() + .endObject(); + createIndex(Version.CURRENT, mapping); + fail("Expected a MapperParsingException, but did not happen"); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), is("mapping [" + type + "]")); + } + } + + private String createPre2xIndexAndMapping() throws Exception { + return createIndexAndMapping(randomVersionBelow2x); + } + + private String createIndexAndMapping(Version version) throws Exception { + XContentBuilder mapping = jsonBuilder().startObject().startObject("properties") + .startObject("date_field").field("type", "date").field("format", "dateOptionalTime").endObject() + .endObject().endObject(); + + return createIndex(version, mapping); + } + + private String createIndex(Version version, XContentBuilder mapping) { + Settings settings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); + createIndex(index, settings, type, mapping); + + ensureGreen(index); + return index; + } + + private void index(XContentBuilder document) { + IndexResponse indexResponse = client().prepareIndex(index, type).setSource(document).setRefresh(true).get(); + assertThat(indexResponse.isCreated(), is(true)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index b92bf809f65..32fbe203984 ---
a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -25,7 +25,9 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.util.Constants; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.LocaleUtils; @@ -53,6 +55,7 @@ import java.util.*; import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.mapper.string.SimpleStringMappingTests.docValuesType; +import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.*; public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { @@ -147,12 +150,21 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { int i = 0; private DocumentMapper mapper(String type, String mapping) throws IOException { + return mapper(type, mapping, Version.CURRENT); + } + + private DocumentMapper mapper(String type, String mapping, Version version) throws IOException { final String indexName = "test-" + (i++); - IndexService index = createIndex(indexName); + IndexService index; + if (version.equals(Version.CURRENT)) { + index = createIndex(indexName); + } else { + index = createIndex(indexName, settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()); + } client().admin().indices().preparePutMapping(indexName).setType(type).setSource(mapping).get(); return index.mapperService().documentMapper(type); } - + private void assertNumericTokensEqual(ParsedDocument doc, DocumentMapper defaultMapper, String fieldA, String fieldB) throws IOException { assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); @@ -181,15 +193,15 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { .endObject().endObject().string(); DocumentMapper defaultMapper = mapper("type", mapping); - long value = System.currentTimeMillis(); + ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder() .startObject() .field("date_field", value) .endObject() .bytes()); - assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); + assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); } public void testDateDetection() throws Exception { @@ -290,7 +302,8 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .bytes()); } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(MapperParsingException.class)); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), is("failed to parse [field2]")); } // Verify that the default is false @@ -301,7 +314,8 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .bytes()); } catch 
(MapperParsingException e) { - assertThat(e.getCause(), instanceOf(MapperParsingException.class)); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), is("failed to parse [field3]")); } // Unless the global ignore_malformed option is set to true @@ -322,7 +336,8 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .bytes()); } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(MapperParsingException.class)); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), is("failed to parse [field2]")); } } @@ -399,12 +414,12 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { throw new AssertionError("missing"); } - public void testNumericResolution() throws Exception { + public void testNumericResolutionBackwardsCompat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("date_field").field("type", "date").field("format", "date_time").field("numeric_resolution", "seconds").endObject().endObject() .endObject().endObject().string(); - DocumentMapper defaultMapper = mapper("type", mapping); + DocumentMapper defaultMapper = mapper("type", mapping, Version.V_0_90_0); // provided as an int ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder() @@ -429,6 +444,16 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { .endObject() .bytes()); assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(44000L)); + + // expected to fail due to field epoch date formatters not being set + DocumentMapper currentMapper = mapper("type", mapping); + try { + currentMapper.parse("type", "2", XContentFactory.jsonBuilder() + .startObject() + .field("date_field", randomBoolean() ? 
"43" : 43) + .endObject() + .bytes()); + } catch (MapperParsingException e) {} } public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 06086c6adc0..784811b26f7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -38,12 +38,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.Index; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MergeResult; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.test.ElasticsearchSingleNodeTest; import org.junit.Test; @@ -57,7 +52,9 @@ import java.util.Map; import static org.elasticsearch.Version.V_1_5_0; import static org.elasticsearch.Version.V_2_0_0; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.VersionUtils.randomVersion; +import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -249,15 +246,15 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { .field("foo", "bar") .endObject(); - MetaData metaData = MetaData.builder().build(); DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping.string()); + MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); IndexRequest request = new IndexRequest("test", "type", "1").source(doc); request.process(metaData, mappingMetaData, true, "test"); assertThat(request.timestamp(), notNullValue()); - assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd")))); + assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT))); } @Test // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] @@ -274,15 +271,15 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { .field("foo", "bar") .endObject(); - MetaData metaData = MetaData.builder().build(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping.string()); + MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); IndexRequest request = new IndexRequest("test", "type", "1").source(doc); request.process(metaData, mappingMetaData, true, "test"); assertThat(request.timestamp(), notNullValue()); - assertThat(request.timestamp(), 
is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd")))); + assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd"), Version.CURRENT))); } @Test // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] @@ -751,11 +748,12 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { .startObject("_timestamp").field("enabled", true).field("path", "custom_timestamp").endObject() .endObject().endObject().string(); DocumentMapper docMapper = createIndex("test", BWC_SETTINGS).mapperService().documentMapperParser().parse(mapping); + MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 1).endObject(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); + request.process(metaData, mappingMetaData, true, "test"); assertEquals(request.timestamp(), "1"); } @@ -766,28 +764,69 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { .endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping); + MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("_timestamp", 2000000).endObject(); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); + request.process(metaData, mappingMetaData, true, "test"); // _timestamp in a document never worked, so backcompat is ignoring the field - assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY")), request.timestamp()); + assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY"), Version.V_1_4_2), request.timestamp()); assertNull(docMapper.parse("type", "1", doc.bytes()).rootDoc().get("_timestamp")); } public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject() + .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject() .endObject().endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject(); IndexRequest request = new IndexRequest("test", "type", "1").source(doc).timestamp("2015060210"); MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - request.process(MetaData.builder().build(), mappingMetaData, true, "test"); + request.process(metaData, mappingMetaData, true, "test"); assertThat(request.timestamp(), is("1433239200000")); } + + public void 
testThatIndicesBefore2xMustSupportUnixTimestampsInAnyDateFormat() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject() + .endObject().endObject().string(); + + BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes(); + + // + // test with older versions + Settings oldSettings = settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersionBetween(random(), Version.V_0_90_0, Version.V_1_6_0)).build(); + DocumentMapper docMapper = createIndex("old-index", oldSettings).mapperService().documentMapperParser().parse(mapping); + + MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); + + // both index requests are successfully processed + IndexRequest oldIndexDateIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1970-01-01"); + oldIndexDateIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index"); + IndexRequest oldIndexTimestampIndexRequest = new IndexRequest("old-index", "type", "1").source(source).timestamp("1234567890"); + oldIndexTimestampIndexRequest.process(metaData, new MappingMetaData(docMapper), true, "old-index"); + + // + // test with 2.x + DocumentMapper currentMapper = createIndex("new-index").mapperService().documentMapperParser().parse(mapping); + MetaData newMetaData = client().admin().cluster().prepareState().get().getState().getMetaData(); + + // this works with 2.x + IndexRequest request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1970-01-01"); + request.process(newMetaData, new MappingMetaData(currentMapper), true, "new-index"); + + // this fails with 2.x + request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1234567890"); + try { + request.process(newMetaData, new MappingMetaData(currentMapper), true, "new-index"); + fail("expected timestamp parsing to fail, unix timestamps are not supported on 2.x indices"); + } catch (Exception e) { + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("failed to parse timestamp [1234567890]")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorTests.java index 1a02ff0b5b6..699c7e19286 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorTests.java @@ -1813,7 +1813,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest { @Test public void testPercolatorQueryWithNowRange() throws Exception { client().admin().indices().prepareCreate("test") - .addMapping("my-type", "timestamp", "type=date") + .addMapping("my-type", "timestamp", "type=date,format=epoch_millis") .get(); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreTests.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreTests.java index c3a71bf7931..aef01106e9e 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreTests.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreTests.java @@ -449,7 +449,7 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest { assertAcked(prepareCreate("test").addMapping( "type1",
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string") - .endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject())); + .endObject().startObject("num1").field("type", "date").field("format", "epoch_millis").endObject().endObject().endObject().endObject())); ensureYellow(); client().index( indexRequest("test").type("type1").id("1") diff --git a/dev-tools/build_release.py b/dev-tools/build_release.py index af3baed7e7d..8ae300d98fd 100644 --- a/dev-tools/build_release.py +++ b/dev-tools/build_release.py @@ -6,10 +6,10 @@ # not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on +# software distributed under the License is distributed on # an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. @@ -19,119 +19,146 @@ import tempfile import shutil import os import datetime -import argparse -import github3 -import smtplib +import json +import time import sys +import argparse +import hmac +import urllib +import fnmatch +import socket +import urllib.request +import subprocess -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText +from functools import partial +from http.client import HTTPConnection +from http.client import HTTPSConnection -from os.path import dirname, abspath -""" - This tool builds a release from the a given elasticsearch plugin branch. +""" + This tool builds a release from the a given elasticsearch branch. In order to execute it go in the top level directory and run: - $ python3 dev_tools/build_release.py --branch master --publish --remote origin + $ python3 dev_tools/build_release.py --branch 0.90 --publish --remote origin By default this script runs in 'dry' mode which essentially simulates a release. If the - '--publish' option is set the actual release is done. - - $ python3 dev_tools/build_release.py --publish --remote origin - - The script takes over almost all + '--publish' option is set the actual release is done. The script takes over almost all steps necessary for a release from a high level point of view it does the following things: - - run prerequisite checks + - run prerequisit checks ie. 
check for Java 1.7 being present or S3 credentials available as env variables - detect the version to release from the specified branch (--branch) or the current branch - - creates a version release branch & updates pom.xml to point to a release version rather than a snapshot - - builds the artifacts - - commits the new version and merges the version release branch into the source branch - - merges the master release branch into the master branch - - creates a tag and pushes branch and master to the specified origin (--remote) - - publishes the releases to sonatype + - creates a release branch & updates pom.xml and Version.java to point to a release version rather than a snapshot + - builds the artifacts and runs smoke-tests on the build zip & tar.gz files + - commits the new version and merges the release branch into the source branch + - creates a tag and pushes the commit to the specified origin (--remote) + - publishes the releases to Sonatype and S3 Once it's done it will print all the remaining steps. Prerequisites: - Python 3k for script execution + - Boto for S3 Upload ($ apt-get install python-boto) + - RPM for RPM building ($ apt-get install rpm) + - S3 keys exported via ENV variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY) + - GPG data exported via ENV variables (GPG_KEY_ID, GPG_PASSPHRASE, optionally GPG_KEYRING) + - S3 target repository via ENV variables (S3_BUCKET_SYNC_TO, optionally S3_BUCKET_SYNC_FROM) """ env = os.environ +PLUGINS = [('license', 'elasticsearch/license/latest'), + ('bigdesk', 'lukas-vlcek/bigdesk'), + ('paramedic', 'karmi/elasticsearch-paramedic'), + ('segmentspy', 'polyfractal/elasticsearch-segmentspy'), + ('inquisitor', 'polyfractal/elasticsearch-inquisitor'), + ('head', 'mobz/elasticsearch-head')] + LOG = env.get('ES_RELEASE_LOG', '/tmp/elasticsearch_release.log') -ROOT_DIR = abspath(os.path.join(abspath(dirname(__file__)), '../')) -POM_FILE = ROOT_DIR + '/pom.xml' -########################################################## -# -# Utility methods (log and run) -# -########################################################## -# Log a message +# console colors +COLOR_OK = '\033[92m' +COLOR_END = '\033[0m' +COLOR_FAIL = '\033[91m' + def log(msg): - log_plain('\n%s' % msg) + log_plain('\n%s' % msg) - -# Purge the log file -def purge_log(): - try: - os.remove(LOG) - except FileNotFoundError: - pass - - -# Log a message to the LOG file def log_plain(msg): - f = open(LOG, mode='ab') - f.write(msg.encode('utf-8')) - f.close() + f = open(LOG, mode='ab') + f.write(msg.encode('utf-8')) + f.close() - -# Run a command and log it def run(command, quiet=False): - log('%s: RUN: %s\n' % (datetime.datetime.now(), command)) - if os.system('%s >> %s 2>&1' % (command, LOG)): - msg = ' FAILED: %s [see log %s]' % (command, LOG) - if not quiet: - print(msg) - raise RuntimeError(msg) + log('%s: RUN: %s\n' % (datetime.datetime.now(), command)) + if os.system('%s >> %s 2>&1' % (command, LOG)): + msg = ' FAILED: %s [see log %s]' % (command, LOG) + if not quiet: + print(msg) + raise RuntimeError(msg) -########################################################## -# -# Clean logs and check JAVA and Maven -# -########################################################## try: - purge_log() - JAVA_HOME = env['JAVA_HOME'] + JAVA_HOME = env['JAVA_HOME'] except KeyError: - raise RuntimeError(""" + raise RuntimeError(""" Please set JAVA_HOME in the env before running release tool On OSX use: export JAVA_HOME=`/usr/libexec/java_home -v '1.7*'`""") try: - MVN = 'mvn' - # make sure mvn3 is used if
mvn3 is available - # some systems use maven 2 as default - run('mvn3 --version', quiet=True) - MVN = 'mvn3' -except RuntimeError: - pass + JAVA_HOME = env['JAVA7_HOME'] +except KeyError: + pass # no JAVA7_HOME - we rely on JAVA_HOME +try: + # make sure mvn3 is used if mvn3 is available + # some systems use maven 2 as default + subprocess.check_output('mvn3 --version', shell=True, stderr=subprocess.STDOUT) + MVN = 'mvn3' +except subprocess.CalledProcessError: + MVN = 'mvn' + def java_exe(): - path = JAVA_HOME - return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (path, path, path) + path = JAVA_HOME + return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (path, path, path) +def verify_java_version(version): + s = os.popen('%s; java -version 2>&1' % java_exe()).read() + if ' version "%s.' % version not in s: + raise RuntimeError('got wrong version for java %s:\n%s' % (version, s)) + +# Verifies the java version. We guarantee that we run with Java 1.7 +# If 1.7 is not available fail the build! +def verify_mvn_java_version(version, mvn): + s = os.popen('%s; %s --version 2>&1' % (java_exe(), mvn)).read() + if 'Java version: %s' % version not in s: + raise RuntimeError('got wrong java version for %s %s:\n%s' % (mvn, version, s)) + +# Returns the hash of the current git HEAD revision +def get_head_hash(): + return os.popen('git rev-parse --verify HEAD 2>&1').read().strip() + +# Returns the hash of the given tag revision +def get_tag_hash(tag): + return os.popen('git show-ref --tags %s --hash 2>&1' % (tag)).read().strip() + +# Returns the name of the current branch +def get_current_branch(): + return os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read().strip() # Utility that returns the name of the release branch for a given version -def release_branch(branchsource, version): - return 'release_branch_%s_%s' % (branchsource, version) +def release_branch(version): + return 'release_branch_%s' % version + +# runs git fetch on the given remote +def fetch(remote): + run('git fetch %s' % remote) + +# Creates a new release branch from the given source branch +# and rebases the source branch from the remote before creating +# the release branch. Note: This fails if the source branch +# doesn't exist on the provided remote. +def create_release_branch(remote, src_branch, release): + run('git checkout %s' % src_branch) + run('git pull --rebase %s %s' % (remote, src_branch)) + run('git checkout -b %s' % (release_branch(release))) # Reads the given file and applies the @@ -139,204 +166,367 @@ def release_branch(branchsource, version): # a line the given file is replaced with # the modified input.
def process_file(file_path, line_callback): - fh, abs_path = tempfile.mkstemp() - modified = False - with open(abs_path, 'w', encoding='utf-8') as new_file: - with open(file_path, encoding='utf-8') as old_file: - for line in old_file: - new_line = line_callback(line) - modified = modified or (new_line != line) - new_file.write(new_line) - os.close(fh) - if modified: - #Remove original file - os.remove(file_path) - #Move new file - shutil.move(abs_path, file_path) - return True - else: - # nothing to do - just remove the tmp file - os.remove(abs_path) - return False - - -# Split a version x.y.z as an array of digits [x,y,z] -def split_version_to_digits(version): - return list(map(int, re.findall(r'\d+', version))) - - -# Guess the next snapshot version number (increment last digit) -def guess_snapshot(version): - digits = split_version_to_digits(version) - source = '%s.%s.%s' % (digits[0], digits[1], digits[2]) - destination = '%s.%s.%s' % (digits[0], digits[1], digits[2] + 1) - return version.replace(source, destination) - - -# Guess the anchor in generated documentation -# Looks like this "#version-230-for-elasticsearch-13" -def get_doc_anchor(release, esversion): - plugin_digits = split_version_to_digits(release) - es_digits = split_version_to_digits(esversion) - return '#version-%s%s%s-for-elasticsearch-%s%s' % ( - plugin_digits[0], plugin_digits[1], plugin_digits[2], es_digits[0], es_digits[1]) + fh, abs_path = tempfile.mkstemp() + modified = False + with open(abs_path,'w', encoding='utf-8') as new_file: + with open(file_path, encoding='utf-8') as old_file: + for line in old_file: + new_line = line_callback(line) + modified = modified or (new_line != line) + new_file.write(new_line) + os.close(fh) + if modified: + #Remove original file + os.remove(file_path) + #Move new file + shutil.move(abs_path, file_path) + return True + else: + # nothing to do - just remove the tmp file + os.remove(abs_path) + return False +# Walks the given directory path (defaults to 'docs') +# and replaces all 'coming[$version]' tags with +# 'added[$version]'. This method only accesses asciidoc files. 
+def update_reference_docs(release_version, path='docs'): + pattern = 'coming[%s' % (release_version) + replacement = 'added[%s' % (release_version) + pending_files = [] + def callback(line): + return line.replace(pattern, replacement) + for root, _, file_names in os.walk(path): + for file_name in fnmatch.filter(file_names, '*.asciidoc'): + full_path = os.path.join(root, file_name) + if process_file(full_path, callback): + pending_files.append(os.path.join(root, file_name)) + return pending_files # Moves the pom.xml file from a snapshot to a release def remove_maven_snapshot(pom, release): - pattern = '%s-SNAPSHOT' % release - replacement = '%s' % release + pattern = '%s-SNAPSHOT' % (release) + replacement = '%s' % (release) + def callback(line): + return line.replace(pattern, replacement) + process_file(pom, callback) - def callback(line): - return line.replace(pattern, replacement) +# Moves the Version.java file from a snapshot to a release +def remove_version_snapshot(version_file, release): + # 1.0.0.Beta1 -> 1_0_0_Beta1 + release = release.replace('.', '_') + pattern = 'new Version(V_%s_ID, true' % (release) + replacement = 'new Version(V_%s_ID, false' % (release) + def callback(line): + return line.replace(pattern, replacement) + process_file(version_file, callback) - process_file(pom, callback) +# Stages the given files for the next git commit +def add_pending_files(*files): + for file in files: + run('git add %s' % (file)) +# Executes a git commit with 'release [version]' as the commit message +def commit_release(release): + run('git commit -m "release [%s]"' % release) -# Moves the pom.xml file to the next snapshot -def add_maven_snapshot(pom, release, snapshot): - pattern = '%s' % release - replacement = '%s-SNAPSHOT' % snapshot +def commit_feature_flags(release): + run('git commit -m "Update Documentation Feature Flags [%s]"' % release) - def callback(line): - return line.replace(pattern, replacement) +def tag_release(release): + run('git tag -a v%s -m "Tag release version %s"' % (release, release)) - process_file(pom, callback) +def run_mvn(*cmd): + for c in cmd: + run('%s; %s %s' % (java_exe(), MVN, c)) +def build_release(run_tests=False, dry_run=True, cpus=1, bwc_version=None): + target = 'deploy' + if dry_run: + target = 'package' + if run_tests: + run_mvn('clean', + 'test -Dtests.jvms=%s -Des.node.mode=local' % (cpus), + 'test -Dtests.jvms=%s -Des.node.mode=network' % (cpus)) + if bwc_version: + print('Running Backwards compatibility tests against version [%s]' % (bwc_version)) + run_mvn('clean', 'test -Dtests.filter=@backwards -Dtests.bwc.version=%s -Dtests.bwc=true -Dtests.jvms=1' % bwc_version) + run_mvn('clean test-compile -Dforbidden.test.signatures="org.apache.lucene.util.LuceneTestCase\$AwaitsFix @ Please fix all bugs before release"') + gpg_args = '-Dgpg.key="%s" -Dgpg.passphrase="%s" -Ddeb.sign=true' % (env.get('GPG_KEY_ID'), env.get('GPG_PASSPHRASE')) + if env.get('GPG_KEYRING'): + gpg_args += ' -Dgpg.keyring="%s"' % env.get('GPG_KEYRING') + run_mvn('clean %s -DskipTests %s' % (target, gpg_args)) + success = False + try: + run_mvn('-DskipTests rpm:rpm %s' % (gpg_args)) + success = True + finally: + if not success: + print(""" + RPM Building failed, make sure "rpm" tools are installed. + Use one of the following commands to install: + $ brew install rpm # on OSX + $ apt-get install rpm # on Ubuntu et al. + """) -# Checks the pom.xml for the release version. <version>
2.0.0-SNAPSHOT</version> +# Uses the github API to fetch open tickets for the given release version +# if it finds any tickets open for that version it will throw an exception +def ensure_no_open_tickets(version): + version = "v%s" % version + conn = HTTPSConnection('api.github.com') + try: + log('Checking for open tickets on Github for version %s' % version) + log('Check if node is available') + conn.request('GET', '/repos/elastic/elasticsearch/issues?state=open&labels=%s' % version, headers= {'User-Agent' : 'Elasticsearch version checker'}) + res = conn.getresponse() + if res.status == 200: + issues = json.loads(res.read().decode("utf-8")) + if issues: + urls = [] + for issue in issues: + urls.append(issue['html_url']) + raise RuntimeError('Found open issues for release version %s:\n%s' % (version, '\n'.join(urls))) + else: + log("No open issues found for version %s" % version) + else: + raise RuntimeError('Failed to fetch issue list from Github for release version %s' % version) + except socket.error as e: + log("Failed to fetch issue list from Github for release version %s - Exception: [%s]" % (version, e)) + #that is ok it might not be there yet + finally: + conn.close() + +def wait_for_node_startup(host='127.0.0.1', port=9200,timeout=15): + for _ in range(timeout): + conn = HTTPConnection(host, port, timeout) + try: + log('Waiting until node becomes available for 1 second') + time.sleep(1) + log('Check if node is available') + conn.request('GET', '') + res = conn.getresponse() + if res.status == 200: + return True + except socket.error as e: + log("Failed while waiting for node - Exception: [%s]" % e) + #that is ok it might not be there yet + finally: + conn.close() + + return False + +# Ensures we are using a true Lucene release, not a snapshot build: +def verify_lucene_version(): + s = open('pom.xml', encoding='utf-8').read() + if 'download.elastic.co/lucenesnapshots' in s: + raise RuntimeError('pom.xml contains download.elastic.co/lucenesnapshots repository: remove that before releasing') + + m = re.search(r'<lucene.version>(.*?)</lucene.version>', s) + if m is None: + raise RuntimeError('unable to locate lucene.version in pom.xml') + lucene_version = m.group(1) + + m = re.search(r'<lucene.maven.version>(.*?)</lucene.maven.version>', s) + if m is None: + raise RuntimeError('unable to locate lucene.maven.version in pom.xml') + lucene_maven_version = m.group(1) + if lucene_version != lucene_maven_version: + raise RuntimeError('pom.xml is still using a snapshot release of lucene (%s): cutover to a real lucene release before releasing' % lucene_maven_version) + +# Checks the pom.xml for the release version. # This method fails if the pom file has no SNAPSHOT version set ie. # if the version is already on a release version we fail. # Returns the next version string ie.
0.90.7 def find_release_version(src_branch): - git_checkout(src_branch) - with open(POM_FILE, encoding='utf-8') as file: - for line in file: - match = re.search(r'<version>(.+)-SNAPSHOT</version>', line) - if match: - return match.group(1) - raise RuntimeError('Could not find release version in branch %s' % src_branch) + run('git checkout %s' % src_branch) + with open('pom.xml', encoding='utf-8') as file: + for line in file: + match = re.search(r'<version>(.+)-SNAPSHOT</version>', line) + if match: + return match.group(1) + raise RuntimeError('Could not find release version in branch %s' % src_branch) +def artifact_names(release, path = ''): + return [os.path.join(path, 'elasticsearch-%s.%s' % (release, t)) for t in ['deb', 'tar.gz', 'zip']] -# extract a value from pom.xml -def find_from_pom(tag): - with open(POM_FILE, encoding='utf-8') as file: - for line in file: - match = re.search(r'<%s>(.+)</%s>' % (tag, tag), line) - if match: - return match.group(1) - raise RuntimeError('Could not find <%s> in pom.xml file' % (tag)) +def get_artifacts(release): + common_artifacts = artifact_names(release, 'target/releases/') + for f in common_artifacts: + if not os.path.isfile(f): + raise RuntimeError('Could not find required artifact at %s' % f) + rpm = os.path.join('target/rpm/elasticsearch/RPMS/noarch/', 'elasticsearch-%s-1.noarch.rpm' % release) + if os.path.isfile(rpm): + log('RPM [%s] contains: ' % rpm) + run('rpm -pqli %s' % rpm) + # this is an oddness of RPM that attaches -1 so we have to rename it + renamed_rpm = os.path.join('target/rpm/elasticsearch/RPMS/noarch/', 'elasticsearch-%s.noarch.rpm' % release) + shutil.move(rpm, renamed_rpm) + common_artifacts.append(renamed_rpm) + else: + raise RuntimeError('Could not find required artifact at %s' % rpm) + return common_artifacts +# Checks the jar files in each package +# Barfs if any of the package jar files differ +def check_artifacts_for_same_jars(artifacts): + jars = [] + for file in artifacts: + if file.endswith('.zip'): + jars.append(subprocess.check_output("unzip -l %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) + if file.endswith('.tar.gz'): + jars.append(subprocess.check_output("tar tzvf %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) + if file.endswith('.rpm'): + jars.append(subprocess.check_output("rpm -pqli %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) + if file.endswith('.deb'): + jars.append(subprocess.check_output("dpkg -c %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) + if len(set(jars)) != 1: + raise RuntimeError('JAR contents of packages are not the same, please check the package contents.
Use [unzip -l], [tar tzvf], [dpkg -c], [rpm -pqli] to inspect') -########################################################## -# -# GIT commands -# -########################################################## -# Returns the hash of the current git HEAD revision -def get_head_hash(): - return os.popen('git rev-parse --verify HEAD 2>&1').read().strip() +# Generates sha1 checksums for all files +# and returns the checksum files as well +# as the given files in a list +def generate_checksums(files): + res = [] + for release_file in files: + directory = os.path.dirname(release_file) + file = os.path.basename(release_file) + checksum_file = '%s.sha1.txt' % file + if os.system('cd %s; shasum %s > %s' % (directory, file, checksum_file)): + raise RuntimeError('Failed to generate checksum for file %s' % release_file) + res = res + [os.path.join(directory, checksum_file), release_file] + return res -# Returns the name of the current branch -def get_current_branch(): - return os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read().strip() - - -# runs get fetch on the given remote -def fetch(remote): - run('git fetch %s' % remote) - - -# Creates a new release branch from the given source branch -# and rebases the source branch from the remote before creating -# the release branch. Note: This fails if the source branch -# doesn't exist on the provided remote. -def create_release_branch(remote, src_branch, release): - git_checkout(src_branch) - run('git pull --rebase %s %s' % (remote, src_branch)) - run('git checkout -b %s' % (release_branch(src_branch, release))) - - -# Stages the given files for the next git commit -def add_pending_files(*files): +def download_and_verify(release, files, plugins=None, base_url='https://download.elastic.co/elasticsearch/elasticsearch'): + print('Downloading and verifying release %s from %s' % (release, base_url)) + tmp_dir = tempfile.mkdtemp() + try: + downloaded_files = [] for file in files: - run('git add %s' % file) + name = os.path.basename(file) + url = '%s/%s' % (base_url, name) + abs_file_path = os.path.join(tmp_dir, name) + print(' Downloading %s' % (url)) + downloaded_files.append(abs_file_path) + urllib.request.urlretrieve(url, abs_file_path) + url = ''.join([url, '.sha1.txt']) + checksum_file = os.path.join(tmp_dir, ''.join([abs_file_path, '.sha1.txt'])) + urllib.request.urlretrieve(url, checksum_file) + print(' Verifying checksum %s' % (checksum_file)) + run('cd %s && sha1sum -c %s' % (tmp_dir, os.path.basename(checksum_file))) + smoke_test_release(release, downloaded_files, get_tag_hash('v%s' % release), plugins) + print(' SUCCESS') + finally: + shutil.rmtree(tmp_dir) -
-# Push the actual branch and master branch
-def git_push(remote, src_branch, release_version, dry_run):
-  if not dry_run:
-    run('git push %s %s master' % (remote, src_branch)) # push the commit and the master
-    run('git push %s v%s' % (remote, release_version)) # push the tag
+def smoke_test_release(release, files, expected_hash, plugins):
+  for release_file in files:
+    if not os.path.isfile(release_file):
+      raise RuntimeError('Smoke test failed: missing file %s' % (release_file))
+    tmp_dir = tempfile.mkdtemp()
+    if release_file.endswith('tar.gz'):
+      run('tar -xzf %s -C %s' % (release_file, tmp_dir))
+    elif release_file.endswith('zip'):
+      run('unzip %s -d %s' % (release_file, tmp_dir))
     else:
-    print('  dryrun [True] -- skipping push to remote %s %s master' % (remote, src_branch))
+      log('Skip SmokeTest for [%s]' % release_file)
+      continue # nothing to do here
+    es_run_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/elasticsearch')
+    print('  Smoke testing package [%s]' % release_file)
+    es_plugin_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/plugin')
+    plugin_names = {}
+    for name, plugin in plugins:
+      print('  Install plugin [%s] from [%s]' % (name, plugin))
+      run('%s; %s %s %s' % (java_exe(), es_plugin_path, '-install', plugin))
+      plugin_names[name] = True
+    if release.startswith("0.90."):
+      background = '' # 0.90.x starts in background automatically
+    else:
+      background = '-d'
+    print('  Starting elasticsearch daemon from [%s]' % os.path.join(tmp_dir, 'elasticsearch-%s' % release))
+    run('%s; %s -Des.node.name=smoke_tester -Des.cluster.name=prepare_release -Des.discovery.zen.ping.multicast.enabled=false -Des.script.inline=on -Des.script.indexed=on %s'
+        % (java_exe(), es_run_path, background))
+    conn = HTTPConnection('127.0.0.1', 9200, 20)
+    wait_for_node_startup()
+    try:
+      try:
+        conn.request('GET', '')
+        res = conn.getresponse()
+        if res.status == 200:
+          version = json.loads(res.read().decode("utf-8"))['version']
+          if release != version['number']:
+            raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number']))
+          if version['build_snapshot']:
+            raise RuntimeError('Expected non-snapshot version')
+          if version['build_hash'].strip() != expected_hash:
+            raise RuntimeError('HEAD hash does not match expected [%s] but got [%s]' % (expected_hash, version['build_hash']))
+          print('  Running REST Spec tests against package [%s]' % release_file)
+          run_mvn('test -Dtests.cluster=%s -Dtests.class=*.*RestTests' % ("127.0.0.1:9300"))
+          print('  Verify if plugins are listed in _nodes')
+          conn.request('GET', '/_nodes?plugin=true&pretty=true')
+          res = conn.getresponse()
+          if res.status == 200:
+            nodes = json.loads(res.read().decode("utf-8"))['nodes']
+            for _, node in nodes.items():
+              node_plugins = node['plugins']
+              for node_plugin in node_plugins:
+                if not plugin_names.get(node_plugin['name'], False):
+                  raise RuntimeError('Unexpected plugin %s' % node_plugin['name'])
+                del plugin_names[node_plugin['name']]
+            if plugin_names:
+              raise RuntimeError('Plugins not loaded %s' % list(plugin_names.keys()))

-##########################################################
-#
-# Maven commands
-#
-##########################################################
-# Run a given maven command
-def run_mvn(*cmd):
-  for c in cmd:
-    run('%s; %s -f %s %s' % (java_exe(), MVN, POM_FILE, c))
+          else:
+            raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
+        else:
+          raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
+      finally:
+        conn.request('POST', '/_cluster/nodes/_local/_shutdown')
+        time.sleep(1) # give the node some time to shut down
+        shutdown_res = conn.getresponse()
+        if shutdown_res.status != 200:
+          raise RuntimeError('Expected HTTP 200 but got %s on node shutdown' % shutdown_res.status)
+    finally:
+      conn.close()
+    shutil.rmtree(tmp_dir)

-# Run deploy or package depending on dry_run
-# Default to run mvn package
-# When run_tests=True a first mvn clean test is run
-def build_release(run_tests=False, dry_run=True):
-  target = 'deploy'
-  tests = '-DskipTests'
-  if run_tests:
-    tests = ''
+def merge_tag_push(remote, src_branch, release_version, dry_run):
+  run('git checkout %s' % src_branch)
+  run('git merge %s' % release_branch(release_version))
+  run('git tag v%s' % release_version)
+  if not dry_run:
+    run('git push %s %s' % (remote, src_branch)) # push the commit
+    run('git push %s v%s' % (remote, release_version)) # push the tag
+  else:
+    print('  dryrun [True] -- skipping push to remote %s' % remote)
+
+def publish_artifacts(artifacts, base='elasticsearch/elasticsearch', dry_run=True):
+  location = os.path.dirname(os.path.realpath(__file__))
+  for artifact in artifacts:
     if dry_run:
-    target = 'package'
-  run_mvn('clean %s %s' % (target, tests))
+      print('Skip Uploading %s to Amazon S3' % artifact)
+    else:
+      print('Uploading %s to Amazon S3' % artifact)
+      # requires boto to be installed but it is not available on python3k yet so we use a dedicated tool
+      run('python %s/upload-s3.py --file %s ' % (location, os.path.abspath(artifact)))

+def publish_repositories(version, dry_run=True):
+  if dry_run:
+    print('Skipping package repository update')
+  else:
+    print('Triggering repository update - calling dev-tools/build_repositories.sh %s' % version)
+    # the version passed in is the branch name (1.5/1.6/2.0/etc.) so we can hand it through
+    run('dev-tools/build_repositories.sh %s' % version)

 def print_sonatype_notice():
-  settings = os.path.join(os.path.expanduser('~'), '.m2/settings.xml')
-  if os.path.isfile(settings):
-    with open(settings, encoding='utf-8') as settings_file:
-      for line in settings_file:
-        if line.strip() == 'sonatype-nexus-snapshots':
-          # moving out - we found the indicator no need to print the warning
-          return
-  print("""
+  settings = os.path.join(os.path.expanduser('~'), '.m2/settings.xml')
+  if os.path.isfile(settings):
+    with open(settings, encoding='utf-8') as settings_file:
+      for line in settings_file:
+        if line.strip() == 'sonatype-nexus-snapshots':
+          # moving out - we found the indicator no need to print the warning
+          return
+  print("""
 NOTE: No sonatype settings detected, make sure you have configured
 your sonatype credentials in '~/.m2/settings.xml':
@@ -358,146 +548,256 @@ def print_sonatype_notice():
 """)

+def check_command_exists(name, cmd):
+  try:
+    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
+  except subprocess.CalledProcessError:
+    raise RuntimeError('Could not run command %s - please make sure it is installed' % (name))

-# we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml
-print_sonatype_notice()

+VERSION_FILE = 'src/main/java/org/elasticsearch/Version.java'
+POM_FILE = 'pom.xml'
+
+# finds the highest available bwc version to test against
+def find_bwc_version(release_version, bwc_dir='backwards'):
+  log('  Lookup bwc version in directory [%s]' % bwc_dir)
+  bwc_version = None
+  if os.path.exists(bwc_dir) and os.path.isdir(bwc_dir):
+    max_version = [int(x) for x in release_version.split('.')]
+    for dir in os.listdir(bwc_dir):
+      if os.path.isdir(os.path.join(bwc_dir, dir)) and dir.startswith('elasticsearch-'):
+        version = [int(x) for x in dir[len('elasticsearch-'):].split('.')]
+        if version < max_version: # bwc tests only against smaller versions
+          if (not bwc_version) or version > [int(x) for x in bwc_version.split('.')]:
+            bwc_version = dir[len('elasticsearch-'):]
+    log('  Using bwc version [%s]' % bwc_version)
+  else:
+    log('  bwc directory [%s] does not exist or is not a directory - skipping' % bwc_dir)
+  return bwc_version
+
+def ensure_checkout_is_clean(branchName):
+  # Make sure no local mods:
+  s = subprocess.check_output('git diff --shortstat', shell=True)
+  if len(s) > 0:
+    raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)
+
+  # Make sure no untracked files:
+  s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
+  if 'Untracked files:' in s:
+    raise RuntimeError('git status shows untracked files: got:\n%s' % s)
+
+  # Make sure we are on the right branch (NOTE: a bit weak, since we default to current branch):
+  if 'On branch %s' % branchName not in s:
+    raise RuntimeError('git status does not show branch %s: got:\n%s' % (branchName, s))
+
+  # Make sure we have all changes from origin:
+  if 'is behind' in s:
+    raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin %s": got:\n%s' % (branchName, s))
+
+  # Make sure we have no local unpushed changes (this is supposed to be a clean area):
+  if 'is ahead' in s:
+    raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout %s", "git reset --hard origin/%s": got:\n%s' % (branchName, branchName, s))
+
+# Checks all source files for //NORELEASE comments
+def check_norelease(path='src'):
+  pattern = re.compile(r'\bnorelease\b', re.IGNORECASE)
+  for root, _, file_names in os.walk(path):
+    for file_name in fnmatch.filter(file_names, '*.java'):
+      full_path = os.path.join(root, file_name)
+      line_number = 0
+      with open(full_path, 'r', encoding='utf-8') as current_file:
+        for line in current_file:
+          line_number = line_number + 1
+          if pattern.search(line):
+            raise RuntimeError('Found //norelease comment in %s line %s' % (full_path, line_number))
+
+def run_and_print(text, run_function):
+  try:
+    print(text, end='')
+    run_function()
+    print(COLOR_OK + 'OK' + COLOR_END)
+    return True
+  except RuntimeError:
+    print(COLOR_FAIL + 'NOT OK' + COLOR_END)
+    return False
+
+def check_env_var(text, env_var):
+  try:
+    print(text, end='')
+    env[env_var]
+    print(COLOR_OK + 'OK' + COLOR_END)
+    return True
+  except KeyError:
+    print(COLOR_FAIL + 'NOT OK' + COLOR_END)
+    return False
+
+def check_environment_and_commandline_tools(check_only):
+  checks = list()
+  checks.append(check_env_var('Checking for AWS env configuration AWS_SECRET_ACCESS_KEY... ', 'AWS_SECRET_ACCESS_KEY'))
+  checks.append(check_env_var('Checking for AWS env configuration AWS_ACCESS_KEY_ID... ', 'AWS_ACCESS_KEY_ID'))
+  checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_USERNAME... ', 'SONATYPE_USERNAME'))
+  checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_PASSWORD... ', 'SONATYPE_PASSWORD'))
+  checks.append(check_env_var('Checking for GPG env configuration GPG_KEY_ID... ', 'GPG_KEY_ID'))
+  checks.append(check_env_var('Checking for GPG env configuration GPG_PASSPHRASE... ', 'GPG_PASSPHRASE'))
+  checks.append(check_env_var('Checking for S3 repo upload env configuration S3_BUCKET_SYNC_TO... ', 'S3_BUCKET_SYNC_TO'))
+  checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_NAME... ', 'GIT_AUTHOR_NAME'))
+  checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_EMAIL... ', 'GIT_AUTHOR_EMAIL'))
+
+  checks.append(run_and_print('Checking command: rpm... ', partial(check_command_exists, 'rpm', 'rpm --version')))
+  checks.append(run_and_print('Checking command: dpkg... ', partial(check_command_exists, 'dpkg', 'dpkg --version')))
+  checks.append(run_and_print('Checking command: gpg... ', partial(check_command_exists, 'gpg', 'gpg --version')))
+  checks.append(run_and_print('Checking command: expect... ', partial(check_command_exists, 'expect', 'expect -v')))
+  checks.append(run_and_print('Checking command: createrepo... ', partial(check_command_exists, 'createrepo', 'createrepo --version')))
+  checks.append(run_and_print('Checking command: s3cmd... ', partial(check_command_exists, 's3cmd', 's3cmd --version')))
+  checks.append(run_and_print('Checking command: apt-ftparchive... ', partial(check_command_exists, 'apt-ftparchive', 'apt-ftparchive --version')))
+
+  # boto, check error code being returned
+  location = os.path.dirname(os.path.realpath(__file__))
+  command = 'python %s/upload-s3.py -h' % (location)
+  checks.append(run_and_print('Testing boto python dependency... ', partial(check_command_exists, 'python-boto', command)))
+
+  checks.append(run_and_print('Checking java version... ', partial(verify_java_version, '1.7')))
+  checks.append(run_and_print('Checking maven java version... ', partial(verify_mvn_java_version, '1.7', MVN)))
+
+  if check_only:
+    sys.exit(0)
+
+  if False in checks:
+    print("Exiting due to failing checks")
+    sys.exit(1) # non-zero exit so callers can detect the failure

 if __name__ == '__main__':
-  parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Plugin Release')
-  parser.add_argument('--branch', '-b', metavar='master', default=get_current_branch(),
-                      help='The branch to release from. Defaults to the current branch.')
-  parser.add_argument('--skiptests', '-t', dest='tests', action='store_false',
-                      help='Skips tests before release. Tests are run by default.')
-  parser.set_defaults(tests=True)
-  parser.add_argument('--remote', '-r', metavar='origin', default='origin',
-                      help='The remote to push the release commit and tag to. Default is [origin]')
-  parser.add_argument('--publish', '-p', dest='dryrun', action='store_false',
-                      help='Publishes the release. Disable by default.')
-  parser.add_argument('--disable_mail', '-dm', dest='mail', action='store_false',
-                      help='Do not send a release email. Email is sent by default.')
+  parser = argparse.ArgumentParser(description='Builds and publishes an Elasticsearch Release')
+  parser.add_argument('--branch', '-b', metavar='RELEASE_BRANCH', default=get_current_branch(),
+                      help='The branch to release from. Defaults to the current branch.')
+  parser.add_argument('--cpus', '-c', metavar='1', default=1,
+                      help='The number of cpus to use for running the test. Default is [1]')
+  parser.add_argument('--skiptests', '-t', dest='tests', action='store_false',
+                      help='Skips tests before release. Tests are run by default.')
+  parser.set_defaults(tests=True)
+  parser.add_argument('--remote', '-r', metavar='origin', default='origin',
+                      help='The remote to push the release commit and tag to. Default is [origin]')
+  parser.add_argument('--publish', '-d', dest='dryrun', action='store_false',
+                      help='Publishes the release. Disabled by default.')
+  parser.add_argument('--smoke', '-s', dest='smoke', default='',
+                      help='Smoke tests the given release')
+  parser.add_argument('--bwc', '-w', dest='bwc', metavar='backwards', default='backwards',
+                      help='Backwards compatibility version path to use to run compatibility tests against')
+  parser.add_argument('--check-only', dest='check_only', action='store_true',
+                      help='Checks and reports for all requirements and then exits')

-  parser.set_defaults(dryrun=True)
-  parser.set_defaults(mail=True)
-  args = parser.parse_args()
+  parser.set_defaults(dryrun=True)
+  parser.set_defaults(smoke=None)
+  parser.set_defaults(check_only=False)
+  args = parser.parse_args()
+  bwc_path = args.bwc
+  src_branch = args.branch
+  remote = args.remote
+  run_tests = args.tests
+  dry_run = args.dryrun
+  cpus = args.cpus
+  build = not args.smoke
+  smoke_test_version = args.smoke

-  src_branch = args.branch
-  remote = args.remote
-  run_tests = args.tests
-  dry_run = args.dryrun
-  mail = args.mail
+  check_environment_and_commandline_tools(args.check_only)

-  if src_branch == 'master':
-    raise RuntimeError('Can not release the master branch. You need to create another branch before a release')
+  # we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml
+  print_sonatype_notice()

-  if not dry_run:
-    print('WARNING: dryrun is set to "false" - this will push and publish the release')
-    input('Press Enter to continue...')
+  # building the release requires Java 1.7
+  verify_java_version('1.7')
+  verify_mvn_java_version('1.7', MVN)

-  print(''.join(['-' for _ in range(80)]))
-  print('Preparing Release from branch [%s] running tests: [%s] dryrun: [%s]' % (src_branch, run_tests, dry_run))
-  print('  JAVA_HOME is [%s]' % JAVA_HOME)
-  print('  Running with maven command: [%s] ' % (MVN))
+  if os.path.exists(LOG):
+    raise RuntimeError('please remove old release log %s first' % LOG)

+  if not dry_run:
+    print('WARNING: dryrun is set to "false" - this will push and publish the release')
+    input('Press Enter to continue...')
+
+  print(''.join(['-' for _ in range(80)]))
+  print('Preparing Release from branch [%s] running tests: [%s] dryrun: [%s]' % (src_branch, run_tests, dry_run))
+  print('  JAVA_HOME is [%s]' % JAVA_HOME)
+  print('  Running with maven command: [%s] ' % (MVN))
+  if build:
+    check_norelease(path='src')
+    ensure_checkout_is_clean(src_branch)
+    verify_lucene_version()
     release_version = find_release_version(src_branch)
-  artifact_id = find_from_pom('artifactId')
-  artifact_name = find_from_pom('name')
-  artifact_description = find_from_pom('description')
-  elasticsearch_version = find_from_pom('elasticsearch.version')
-  print('  Artifact Id: [%s]' % artifact_id)
-  print('  Release version: [%s]' % release_version)
-  print('  Elasticsearch: [%s]' % elasticsearch_version)
-  if elasticsearch_version.find('-SNAPSHOT') != -1:
-    raise RuntimeError('Can not release with a SNAPSHOT elasticsearch dependency: %s' % elasticsearch_version)
-
-  # extract snapshot
-  default_snapshot_version = guess_snapshot(release_version)
-  snapshot_version = input('Enter next snapshot version [%s]:' % default_snapshot_version)
-  snapshot_version = snapshot_version or default_snapshot_version
-
-  print('  Next version: [%s-SNAPSHOT]' % snapshot_version)
-  print('  Artifact Name: [%s]' % artifact_name)
-  print('  Artifact Description: [%s]' % artifact_description)
-
+    ensure_no_open_tickets(release_version)
     if not dry_run:
-    smoke_test_version = release_version
-
-  try:
-    git_checkout(src_branch)
-    version_hash = get_head_hash()
-    run_mvn('clean') # clean the env!
-    create_release_branch(remote, src_branch, release_version)
-    print('  Created release branch [%s]' % (release_branch(src_branch, release_version)))
-  except RuntimeError:
-    print('Logs:')
-    with open(LOG, 'r') as log_file:
-      print(log_file.read())
-    sys.exit(-1)
-
+      smoke_test_version = release_version
+    head_hash = get_head_hash()
+    run_mvn('clean') # clean the env!
+    print('  Release version: [%s]' % release_version)
+    create_release_branch(remote, src_branch, release_version)
+    print('  Created release branch [%s]' % (release_branch(release_version)))
     success = False
     try:
-    ########################################
-    # Start update process in version branch
-    ########################################
-    pending_files = [POM_FILE]
-    remove_maven_snapshot(POM_FILE, release_version)
-    print('  Done removing snapshot version')
-    add_pending_files(*pending_files) # expects var args use * to expand
-    commit_release(artifact_id, release_version)
-    print('  Committed release version [%s]' % release_version)
-    print(''.join(['-' for _ in range(80)]))
-    print('Building Release candidate')
-    input('Press Enter to continue...')
-    if not dry_run:
-      print('  Running maven builds now and publish to sonatype - run-tests [%s]' % run_tests)
-    else:
-      print('  Running maven builds now run-tests [%s]' % run_tests)
-    build_release(run_tests=run_tests, dry_run=dry_run)
-
-    print(''.join(['-' for _ in range(80)]))
-
-    print('Finish Release -- dry_run: %s' % dry_run)
-    input('Press Enter to continue...')
-
-    print('  merge release branch')
-    git_merge(src_branch, release_version)
-    print('  tag')
-    tag_release(release_version)
-
-    add_maven_snapshot(POM_FILE, release_version, snapshot_version)
-    add_pending_files(*pending_files)
-    commit_snapshot()
-
-    print('  push to %s %s -- dry_run: %s' % (remote, src_branch, dry_run))
-    git_push(remote, src_branch, release_version, dry_run)
-
-    pending_msg = """
-Release successful pending steps:
-  * close and release sonatype repo: https://oss.sonatype.org/
-  * check if the release is there https://oss.sonatype.org/content/repositories/releases/org/elasticsearch/%(artifact_id)s/%(version)s
-"""
-    print(pending_msg % {'version': release_version,
-                         'artifact_id': artifact_id})
-    success = True
+      pending_files = [POM_FILE, VERSION_FILE]
+      remove_maven_snapshot(POM_FILE, release_version)
+      remove_version_snapshot(VERSION_FILE, release_version)
+      print('  Done removing snapshot version')
+      add_pending_files(*pending_files) # expects varargs; use * to expand
+      commit_release(release_version)
+      pending_files = update_reference_docs(release_version)
+      version_head_hash = None
+      # split commits for docs and version to enable easy cherry-picking
+      if pending_files:
+        add_pending_files(*pending_files) # expects varargs; use * to expand
+        commit_feature_flags(release_version)
+        version_head_hash = get_head_hash()
+      print('  Committed release version [%s]' % release_version)
+      print(''.join(['-' for _ in range(80)]))
+      print('Building Release candidate')
+      input('Press Enter to continue...')
+      if not dry_run:
+        print('  Running maven builds now and publishing to Sonatype - run-tests [%s]' % run_tests)
+      else:
+        print('  Running maven builds now run-tests [%s]' % run_tests)
+      build_release(run_tests=run_tests, dry_run=dry_run, cpus=cpus, bwc_version=find_bwc_version(release_version, bwc_path))
+      artifacts = get_artifacts(release_version)
+      print('Checking if all artifacts contain the same jars')
+      check_artifacts_for_same_jars(artifacts)
+      artifacts_and_checksum = generate_checksums(artifacts)
+      smoke_test_release(release_version, artifacts, get_head_hash(), PLUGINS)
+      print(''.join(['-' for _ in range(80)]))
+      print('Finish Release -- dry_run: %s' % dry_run)
+      input('Press Enter to continue...')
+      print('  merge release branch, tag and push to %s %s -- dry_run: %s' % (remote, src_branch, dry_run))
+      merge_tag_push(remote, src_branch, release_version, dry_run)
+      print('  publish artifacts to S3 -- dry_run: %s' % dry_run)
+      publish_artifacts(artifacts_and_checksum, dry_run=dry_run)
+      print('  Updating package repositories -- dry_run: %s' % dry_run)
+      publish_repositories(src_branch, dry_run=dry_run)
+      cherry_pick_command = '.'
+      if version_head_hash:
+        cherry_pick_command = ' and cherry-pick the documentation changes: \'git cherry-pick %s\' to the development branch' % (version_head_hash)
+      pending_msg = """
      Release successful pending steps:
        * create a new vX.Y.Z label on github for the next release, with label color #dddddd (https://github.com/elastic/elasticsearch/labels)
        * publish the maven artifacts on Sonatype: https://oss.sonatype.org/index.html
          - here is a guide: http://central.sonatype.org/pages/releasing-the-deployment.html
        * check if the release is there https://oss.sonatype.org/content/repositories/releases/org/elasticsearch/elasticsearch/%(version)s
        * announce the release on the website / blog post
        * tweet about the release
        * announce the release in the google group/mailing list
        * Move the current branch to a snapshot version for the next point release%(cherry_pick)s
      """
+      print(pending_msg % {'version': release_version, 'cherry_pick': cherry_pick_command})
+      success = True
     finally:
-    if not success:
-      print('Logs:')
-      with open(LOG, 'r') as log_file:
-        print(log_file.read())
-      git_checkout(src_branch)
-      run('git reset --hard %s' % version_hash)
-      try:
-        run('git tag -d v%s' % release_version)
-      except RuntimeError:
-        pass
-    elif dry_run:
-      print('End of dry_run')
-      input('Press Enter to reset changes...')
-      git_checkout(src_branch)
-      run('git reset --hard %s' % version_hash)
-      run('git tag -d v%s' % release_version)
-
-    # we delete this one anyways
-    run('git branch -D %s' % (release_branch(src_branch, release_version)))
-
-    # Checkout the branch we started from
-    git_checkout(src_branch)
+      if not success:
+        run('git reset --hard HEAD')
+        run('git checkout %s' % src_branch)
+      elif dry_run:
+        run('git reset --hard %s' % head_hash)
+        run('git tag -d v%s' % release_version)
+      # we delete this one anyways
+      run('git branch -D %s' % (release_branch(release_version)))
+  else:
+    print("Skipping build - smoke testing only against version %s" % smoke_test_version)
+    run_mvn('clean') # clean the env!
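Aside: the flow above leans on a `wait_for_node_startup` helper (defined elsewhere in this script) that has to poll until the node answers on 127.0.0.1:9200 before the REST checks run. A rough sketch of that kind of poll, under the assumption that the helper simply retries the root endpoint until a 200 arrives or a timeout expires; the exact implementation in the script may differ:

import time
from http.client import HTTPConnection

def wait_for_http(host='127.0.0.1', port=9200, timeout=30):
  # poll the root endpoint until the node answers or the timeout expires
  deadline = time.time() + timeout
  while time.time() < deadline:
    conn = HTTPConnection(host, port, 1)
    try:
      conn.request('GET', '/')
      if conn.getresponse().status == 200:
        return True
    except OSError:
      pass # node not up yet, retry
    finally:
      conn.close()
    time.sleep(1)
  return False

Bounding the poll with a deadline (rather than retrying forever) keeps a broken package from hanging the whole release run.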
+
+
+  if smoke_test_version:
+    fetch(remote)
+    download_and_verify(smoke_test_version, artifact_names(smoke_test_version), plugins=PLUGINS)
diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc
index a965716269e..8b1f58f7ff0 100644
--- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc
@@ -58,7 +58,7 @@ Response:
 [[date-format-pattern]]
 ==== Date Format/Pattern

-NOTE: this information was copied from http://joda-time.sourceforge.net/apidocs/org/joda/time/format/DateTimeFormat.html[JodaDate]
+NOTE: this information was copied from http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[JodaDate]

 All ASCII letters are reserved as format pattern letters, which are defined
 as follows:
diff --git a/docs/reference/mapping/date-format.asciidoc b/docs/reference/mapping/date-format.asciidoc
index a10917ade73..0972340c555 100644
--- a/docs/reference/mapping/date-format.asciidoc
+++ b/docs/reference/mapping/date-format.asciidoc
@@ -8,9 +8,9 @@ specifying `dynamic_date_formats` in the `root object` mapping
 (which will be used unless explicitly overridden by a `date` type).
 There are built in formats supported, as well as complete custom one.

-The parsing of dates uses http://joda-time.sourceforge.net/[Joda]. The
+The parsing of dates uses http://www.joda.org/joda-time/[Joda]. The
 default date parsing used if no format is specified is
-http://joda-time.sourceforge.net/api-release/org/joda/time/format/ISODateTimeFormat.html#dateOptionalTimeParser()[ISODateTimeFormat.dateOptionalTimeParser].
+http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateOptionalTimeParser--[ISODateTimeFormat.dateOptionalTimeParser].

 An extension to the format allow to define several formats using `||`
 separator. This allows to define less strict formats that can be used,
@@ -215,4 +215,4 @@ date formatter in that case.
 === Custom Format

 Allows for a completely customizable date format explained
-http://joda-time.sourceforge.net/api-release/org/joda/time/format/DateTimeFormat.html[here].
+http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html[here].
diff --git a/docs/reference/mapping/types/core-types.asciidoc b/docs/reference/mapping/types/core-types.asciidoc
index 4ca5f5a94ca..1ca05b793c2 100644
--- a/docs/reference/mapping/types/core-types.asciidoc
+++ b/docs/reference/mapping/types/core-types.asciidoc
@@ -378,9 +378,6 @@ defaults to `true` or to the parent `object` type setting.
 |`ignore_malformed` |Ignored a malformed number. Defaults to `false`.

-|`numeric_resolution` |The unit to use when passed in a numeric values. Possible
-values include `seconds` and `milliseconds` (default).
-
 |=======================================================================

 [float]
diff --git a/docs/reference/migration/migrate_2_0.asciidoc b/docs/reference/migration/migrate_2_0.asciidoc
index 56a324159f7..84053a82f44 100644
--- a/docs/reference/migration/migrate_2_0.asciidoc
+++ b/docs/reference/migration/migrate_2_0.asciidoc
@@ -312,6 +312,9 @@ date. This is not supported anymore. If you want to store unix timestamps, you
 need to specify the appropriate formats in the mapping, namely `epoch_second`
 or `epoch_millis`.

+In addition, the `numeric_resolution` mapping parameter is ignored. Use the
+`epoch_second` and `epoch_millis` date formats instead.
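Aside: a hypothetical mapping illustrating the replacement the migration note above describes, with a `date` field that accepts unix timestamps in milliseconds via the `format` parameter rather than `numeric_resolution`; the type and field names here are made up, following the reference docs' own `[source,js]` snippet convention:

[source,js]
--------------------------------------------------
{
  "mappings": {
    "event": {
      "properties": {
        "created": {
          "type": "date",
          "format": "epoch_millis"
        }
      }
    }
  }
}
--------------------------------------------------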
+
 ==== Source field limitations
 The `_source` field could previously be disabled dynamically. Since this field
 is a critical piece of many features like the Update API, it is no longer
diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc
index 0f0763f65c5..9ad68ad4f73 100644
--- a/docs/reference/query-dsl/query-string-query.asciidoc
+++ b/docs/reference/query-dsl/query-string-query.asciidoc
@@ -80,7 +80,7 @@ providing text to a numeric field) to be ignored. Defaults to `ROOT`.

 |`time_zone` | Time Zone to be applied to any range query related to dates. See also
-http://joda-time.sourceforge.net/api-release/org/joda/time/DateTimeZone.html[JODA timezone].
+http://www.joda.org/joda-time/apidocs/org/joda/time/DateTimeZone.html[JODA timezone].

 |=======================================================================

 When a multi term query is being generated, one can control how it gets