mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-03-07 11:29:35 +00:00
Merge branch 'master' into feature/query-refactoring
Conflicts: core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java
This commit is contained in:
commit
2d42839eef
@ -45,8 +45,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
|
||||
|
||||
public static final String REST_EXCEPTION_SKIP_CAUSE = "rest.exception.cause.skip";
|
||||
public static final String REST_EXCEPTION_SKIP_STACK_TRACE = "rest.exception.stacktrace.skip";
|
||||
private static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = false;
|
||||
private static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false;
|
||||
public static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true;
|
||||
public static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false;
|
||||
private static final String INDEX_HEADER_KEY = "es.index";
|
||||
private static final String SHARD_HEADER_KEY = "es.shard";
|
||||
private static final String RESOURCE_HEADER_TYPE_KEY = "es.resource.type";
|
||||
|
@ -19,45 +19,7 @@
|
||||
|
||||
package org.elasticsearch.action;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.ValidationException;
|
||||
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class ActionRequestValidationException extends IllegalArgumentException {
|
||||
|
||||
private final List<String> validationErrors = new ArrayList<>();
|
||||
|
||||
public ActionRequestValidationException() {
|
||||
super("validation failed");
|
||||
}
|
||||
|
||||
public void addValidationError(String error) {
|
||||
validationErrors.add(error);
|
||||
}
|
||||
|
||||
public void addValidationErrors(Iterable<String> errors) {
|
||||
for (String error : errors) {
|
||||
validationErrors.add(error);
|
||||
}
|
||||
}
|
||||
|
||||
public List<String> validationErrors() {
|
||||
return validationErrors;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getMessage() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("Validation Failed: ");
|
||||
int index = 0;
|
||||
for (String error : validationErrors) {
|
||||
sb.append(++index).append(": ").append(error).append(";");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
public class ActionRequestValidationException extends ValidationException {
|
||||
}
|
||||
|
@ -25,6 +25,7 @@ import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.net.URLClassLoader;
|
||||
@ -33,12 +34,7 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.SimpleFileVisitor;
|
||||
import java.nio.file.attribute.BasicFileAttributes;
|
||||
import java.util.Arrays;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.*;
|
||||
import java.util.jar.JarEntry;
|
||||
import java.util.jar.JarFile;
|
||||
import java.util.jar.Manifest;
|
||||
@ -69,7 +65,7 @@ public class JarHell {
|
||||
logger.debug("sun.boot.class.path: {}", System.getProperty("sun.boot.class.path"));
|
||||
logger.debug("classloader urls: {}", Arrays.toString(((URLClassLoader)loader).getURLs()));
|
||||
}
|
||||
checkJarHell(((URLClassLoader)loader).getURLs());
|
||||
checkJarHell(((URLClassLoader) loader).getURLs());
|
||||
}
|
||||
|
||||
/**
|
||||
@ -141,6 +137,7 @@ public class JarHell {
|
||||
// give a nice error if jar requires a newer java version
|
||||
String targetVersion = manifest.getMainAttributes().getValue("X-Compile-Target-JDK");
|
||||
if (targetVersion != null) {
|
||||
checkVersionFormat(targetVersion);
|
||||
checkJavaVersion(jar.toString(), targetVersion);
|
||||
}
|
||||
|
||||
@ -153,23 +150,34 @@ public class JarHell {
|
||||
}
|
||||
}
|
||||
|
||||
public static void checkVersionFormat(String targetVersion) {
|
||||
if (!JavaVersion.isValid(targetVersion)) {
|
||||
throw new IllegalStateException(
|
||||
String.format(
|
||||
Locale.ROOT,
|
||||
"version string must be a sequence of nonnegative decimal integers separated by \".\"'s and may have leading zeros but was %s",
|
||||
targetVersion
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that the java specification version {@code targetVersion}
|
||||
* required by {@code resource} is compatible with the current installation.
|
||||
*/
|
||||
public static void checkJavaVersion(String resource, String targetVersion) {
|
||||
String systemVersion = System.getProperty("java.specification.version");
|
||||
float current = Float.POSITIVE_INFINITY;
|
||||
float target = Float.NEGATIVE_INFINITY;
|
||||
try {
|
||||
current = Float.parseFloat(systemVersion);
|
||||
target = Float.parseFloat(targetVersion);
|
||||
} catch (NumberFormatException e) {
|
||||
// some spec changed, time for a more complex parser
|
||||
}
|
||||
if (current < target) {
|
||||
throw new IllegalStateException(resource + " requires Java " + targetVersion
|
||||
+ ", your system: " + systemVersion);
|
||||
JavaVersion version = JavaVersion.parse(targetVersion);
|
||||
if (JavaVersion.current().compareTo(version) < 0) {
|
||||
throw new IllegalStateException(
|
||||
String.format(
|
||||
Locale.ROOT,
|
||||
"%s requires Java %s:, your system: %s",
|
||||
resource,
|
||||
targetVersion,
|
||||
JavaVersion.current().toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -0,0 +1,87 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
class JavaVersion implements Comparable<JavaVersion> {
|
||||
private final List<Integer> version;
|
||||
|
||||
public List<Integer> getVersion() {
|
||||
return Collections.unmodifiableList(version);
|
||||
}
|
||||
|
||||
private JavaVersion(List<Integer> version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
public static JavaVersion parse(String value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException("value");
|
||||
}
|
||||
if ("".equals(value)) {
|
||||
throw new IllegalArgumentException("value");
|
||||
}
|
||||
|
||||
List<Integer> version = new ArrayList<>();
|
||||
String[] components = value.split("\\.");
|
||||
for (String component : components) {
|
||||
version.add(Integer.valueOf(component));
|
||||
}
|
||||
|
||||
return new JavaVersion(version);
|
||||
}
|
||||
|
||||
public static boolean isValid(String value) {
|
||||
if (!value.matches("^0*[0-9]+(\\.[0-9]+)*$")) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private final static JavaVersion CURRENT = parse(System.getProperty("java.specification.version"));
|
||||
|
||||
public static JavaVersion current() {
|
||||
return CURRENT;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(JavaVersion o) {
|
||||
int len = Math.max(version.size(), o.version.size());
|
||||
for (int i = 0; i < len; i++) {
|
||||
int d = (i < version.size() ? version.get(i) : 0);
|
||||
int s = (i < o.version.size() ? o.version.get(i) : 0);
|
||||
if (s < d)
|
||||
return 1;
|
||||
if (s > d)
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return Strings.collectionToDelimitedString(version, ".");
|
||||
}
|
||||
}
|
@ -24,7 +24,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Maps;
|
||||
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.Version;
|
||||
@ -46,6 +45,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.ValidationException;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
@ -60,12 +60,15 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.indices.*;
|
||||
import org.elasticsearch.indices.IndexAlreadyExistsException;
|
||||
import org.elasticsearch.indices.IndexCreationException;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.indices.InvalidIndexNameException;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.joda.time.DateTime;
|
||||
@ -514,6 +517,15 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
||||
}
|
||||
|
||||
public void validateIndexSettings(String indexName, Settings settings) throws IndexCreationException {
|
||||
List<String> validationErrors = getIndexSettingsValidationErrors(settings);
|
||||
if (validationErrors.isEmpty() == false) {
|
||||
ValidationException validationException = new ValidationException();
|
||||
validationException.addValidationErrors(validationErrors);
|
||||
throw new IndexCreationException(new Index(indexName), validationException);
|
||||
}
|
||||
}
|
||||
|
||||
List<String> getIndexSettingsValidationErrors(Settings settings) {
|
||||
String customPath = settings.get(IndexMetaData.SETTING_DATA_PATH, null);
|
||||
List<String> validationErrors = Lists.newArrayList();
|
||||
if (customPath != null && env.sharedDataFile() == null) {
|
||||
@ -530,22 +542,9 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
||||
validationErrors.add("index must have 1 or more primary shards");
|
||||
}
|
||||
if (number_of_replicas != null && number_of_replicas < 0) {
|
||||
validationErrors.add("index must have 0 or more replica shards");
|
||||
validationErrors.add("index must have 0 or more replica shards");
|
||||
}
|
||||
if (validationErrors.isEmpty() == false) {
|
||||
throw new IndexCreationException(new Index(indexName),
|
||||
new IllegalArgumentException(getMessage(validationErrors)));
|
||||
}
|
||||
}
|
||||
|
||||
private String getMessage(List<String> validationErrors) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("Validation Failed: ");
|
||||
int index = 0;
|
||||
for (String error : validationErrors) {
|
||||
sb.append(++index).append(": ").append(error).append(";");
|
||||
}
|
||||
return sb.toString();
|
||||
return validationErrors;
|
||||
}
|
||||
|
||||
private static class DefaultIndexTemplateFilter implements IndexTemplateFilter {
|
||||
|
@ -29,12 +29,12 @@ import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask;
|
||||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.ValidationException;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.indices.IndexCreationException;
|
||||
import org.elasticsearch.indices.IndexTemplateAlreadyExistsException;
|
||||
import org.elasticsearch.indices.IndexTemplateMissingException;
|
||||
import org.elasticsearch.indices.InvalidIndexTemplateException;
|
||||
@ -179,41 +179,44 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
|
||||
}
|
||||
|
||||
private void validate(PutRequest request) {
|
||||
List<String> validationErrors = Lists.newArrayList();
|
||||
if (request.name.contains(" ")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "name must not contain a space");
|
||||
validationErrors.add("name must not contain a space");
|
||||
}
|
||||
if (request.name.contains(",")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "name must not contain a ','");
|
||||
validationErrors.add("name must not contain a ','");
|
||||
}
|
||||
if (request.name.contains("#")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "name must not contain a '#'");
|
||||
validationErrors.add("name must not contain a '#'");
|
||||
}
|
||||
if (request.name.startsWith("_")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "name must not start with '_'");
|
||||
validationErrors.add("name must not start with '_'");
|
||||
}
|
||||
if (!request.name.toLowerCase(Locale.ROOT).equals(request.name)) {
|
||||
throw new InvalidIndexTemplateException(request.name, "name must be lower cased");
|
||||
validationErrors.add("name must be lower cased");
|
||||
}
|
||||
if (request.template.contains(" ")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "template must not contain a space");
|
||||
validationErrors.add("template must not contain a space");
|
||||
}
|
||||
if (request.template.contains(",")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "template must not contain a ','");
|
||||
validationErrors.add("template must not contain a ','");
|
||||
}
|
||||
if (request.template.contains("#")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "template must not contain a '#'");
|
||||
validationErrors.add("template must not contain a '#'");
|
||||
}
|
||||
if (request.template.startsWith("_")) {
|
||||
throw new InvalidIndexTemplateException(request.name, "template must not start with '_'");
|
||||
validationErrors.add("template must not start with '_'");
|
||||
}
|
||||
if (!Strings.validFileNameExcludingAstrix(request.template)) {
|
||||
throw new InvalidIndexTemplateException(request.name, "template must not container the following characters " + Strings.INVALID_FILENAME_CHARS);
|
||||
validationErrors.add("template must not container the following characters " + Strings.INVALID_FILENAME_CHARS);
|
||||
}
|
||||
|
||||
try {
|
||||
metaDataCreateIndexService.validateIndexSettings(request.name, request.settings);
|
||||
} catch (IndexCreationException exception) {
|
||||
throw new InvalidIndexTemplateException(request.name, exception.getDetailedMessage());
|
||||
List<String> indexSettingsValidation = metaDataCreateIndexService.getIndexSettingsValidationErrors(request.settings);
|
||||
validationErrors.addAll(indexSettingsValidation);
|
||||
if (!validationErrors.isEmpty()) {
|
||||
ValidationException validationException = new ValidationException();
|
||||
validationException.addValidationErrors(validationErrors);
|
||||
throw new InvalidIndexTemplateException(request.name, validationException.getMessage());
|
||||
}
|
||||
|
||||
for (Alias alias : request.aliases) {
|
||||
@ -271,7 +274,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
|
||||
this.mappings.putAll(mappings);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public PutRequest aliases(Set<Alias> aliases) {
|
||||
this.aliases.addAll(aliases);
|
||||
return this;
|
||||
|
@ -0,0 +1,71 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Encapsulates an accumulation of validation errors
|
||||
*/
|
||||
public class ValidationException extends IllegalArgumentException {
|
||||
private final List<String> validationErrors = new ArrayList<>();
|
||||
|
||||
public ValidationException() {
|
||||
super("validation failed");
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new validation error to the accumulating validation errors
|
||||
* @param error the error to add
|
||||
*/
|
||||
public void addValidationError(String error) {
|
||||
validationErrors.add(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a sequence of validation errors to the accumulating validation errors
|
||||
* @param errors the errors to add
|
||||
*/
|
||||
public void addValidationErrors(Iterable<String> errors) {
|
||||
for (String error : errors) {
|
||||
validationErrors.add(error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the validation errors accumulated
|
||||
* @return
|
||||
*/
|
||||
public List<String> validationErrors() {
|
||||
return validationErrors;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getMessage() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("Validation Failed: ");
|
||||
int index = 0;
|
||||
for (String error : validationErrors) {
|
||||
sb.append(++index).append(": ").append(error).append(";");
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
@ -261,6 +261,9 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
||||
if (mapper.type().length() == 0) {
|
||||
throw new InvalidTypeNameException("mapping type name is empty");
|
||||
}
|
||||
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1) && mapper.type().length() > 255) {
|
||||
throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]");
|
||||
}
|
||||
if (mapper.type().charAt(0) == '_') {
|
||||
throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'");
|
||||
}
|
||||
|
@ -88,6 +88,8 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
|
||||
|
||||
private String timeZone;
|
||||
|
||||
private Boolean escape;
|
||||
|
||||
/** To limit effort spent determinizing regexp queries. */
|
||||
private Integer maxDeterminizedStates;
|
||||
|
||||
@ -318,6 +320,14 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set to <tt>true</tt> to enable escaping of the query string
|
||||
*/
|
||||
public QueryStringQueryBuilder escape(boolean escape) {
|
||||
this.escape = escape;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(NAME);
|
||||
@ -401,6 +411,9 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
|
||||
if (timeZone != null) {
|
||||
builder.field("time_zone", timeZone);
|
||||
}
|
||||
if (escape != null) {
|
||||
builder.field("escape", escape);
|
||||
}
|
||||
printBoostAndQueryName(builder);
|
||||
builder.endObject();
|
||||
}
|
||||
|
@ -120,6 +120,7 @@ public class PluginInfo implements Streamable, ToXContent {
|
||||
if (javaVersionString == null) {
|
||||
throw new IllegalArgumentException("Property [java.version] is missing for jvm plugin [" + name + "]");
|
||||
}
|
||||
JarHell.checkVersionFormat(javaVersionString);
|
||||
JarHell.checkJavaVersion(name, javaVersionString);
|
||||
isolated = Boolean.parseBoolean(props.getProperty("isolated", "true"));
|
||||
classname = props.getProperty("classname");
|
||||
|
@ -25,6 +25,8 @@ import org.elasticsearch.bootstrap.Elasticsearch;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.ESLoggerFactory;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
@ -115,11 +117,20 @@ public class BytesRestResponse extends RestResponse {
|
||||
return this.status;
|
||||
}
|
||||
|
||||
private static final ESLogger SUPPRESSED_ERROR_LOGGER = ESLoggerFactory.getLogger("rest.suppressed");
|
||||
|
||||
private static XContentBuilder convert(RestChannel channel, RestStatus status, Throwable t) throws IOException {
|
||||
XContentBuilder builder = channel.newErrorBuilder().startObject();
|
||||
if (t == null) {
|
||||
builder.field("error", "unknown");
|
||||
} else if (channel.detailedErrorsEnabled()) {
|
||||
final ToXContent.Params params;
|
||||
if (channel.request().paramAsBoolean("error_trace", !ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT)) {
|
||||
params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request());
|
||||
} else {
|
||||
SUPPRESSED_ERROR_LOGGER.info("{} Params: {}", t, channel.request().path(), channel.request().params());
|
||||
params = channel.request();
|
||||
}
|
||||
builder.field("error");
|
||||
builder.startObject();
|
||||
final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(t);
|
||||
@ -127,16 +138,13 @@ public class BytesRestResponse extends RestResponse {
|
||||
builder.startArray();
|
||||
for (ElasticsearchException rootCause : rootCauses){
|
||||
builder.startObject();
|
||||
rootCause.toXContent(builder, new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE, "true"), channel.request()));
|
||||
rootCause.toXContent(builder, new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE, "true"), params));
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
|
||||
ElasticsearchException.toXContent(builder, channel.request(), t);
|
||||
ElasticsearchException.toXContent(builder, params, t);
|
||||
builder.endObject();
|
||||
if (channel.request().paramAsBoolean("error_trace", false)) {
|
||||
buildErrorTrace(t, builder);
|
||||
}
|
||||
} else {
|
||||
builder.field("error", simpleMessage(t));
|
||||
}
|
||||
@ -145,45 +153,6 @@ public class BytesRestResponse extends RestResponse {
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
||||
private static void buildErrorTrace(Throwable t, XContentBuilder builder) throws IOException {
|
||||
builder.startObject("error_trace");
|
||||
boolean first = true;
|
||||
int counter = 0;
|
||||
while (t != null) {
|
||||
// bail if there are more than 10 levels, becomes useless really...
|
||||
if (counter++ > 10) {
|
||||
break;
|
||||
}
|
||||
if (!first) {
|
||||
builder.startObject("cause");
|
||||
}
|
||||
buildThrowable(t, builder);
|
||||
if (!first) {
|
||||
builder.endObject();
|
||||
}
|
||||
t = t.getCause();
|
||||
first = false;
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
private static void buildThrowable(Throwable t, XContentBuilder builder) throws IOException {
|
||||
builder.field("message", t.getMessage());
|
||||
for (StackTraceElement stElement : t.getStackTrace()) {
|
||||
builder.startObject("at")
|
||||
.field("class", stElement.getClassName())
|
||||
.field("method", stElement.getMethodName());
|
||||
if (stElement.getFileName() != null) {
|
||||
builder.field("file", stElement.getFileName());
|
||||
}
|
||||
if (stElement.getLineNumber() >= 0) {
|
||||
builder.field("line", stElement.getLineNumber());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Builds a simple error string from the message of the first ElasticsearchException
|
||||
*/
|
||||
|
@ -186,9 +186,8 @@ public class AggregationPath {
|
||||
}
|
||||
|
||||
public AggregationPath subPath(int offset, int length) {
|
||||
PathElement[] subTokens = new PathElement[length];
|
||||
System.arraycopy(pathElements, offset, subTokens, 0, length);
|
||||
return new AggregationPath(pathElements);
|
||||
List<PathElement> subTokens = new ArrayList<>(pathElements.subList(offset, offset + length));
|
||||
return new AggregationPath(subTokens);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -266,12 +265,12 @@ public class AggregationPath {
|
||||
}
|
||||
return aggregator;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Resolves the topmost aggregator pointed by this path using the given root as a point of reference.
|
||||
*
|
||||
* @param root The point of reference of this path
|
||||
* @return The first child aggregator of the root pointed by this path
|
||||
* @return The first child aggregator of the root pointed by this path
|
||||
*/
|
||||
public Aggregator resolveTopmostAggregator(Aggregator root) {
|
||||
AggregationPath.PathElement token = pathElements.get(0);
|
||||
@ -279,7 +278,7 @@ public class AggregationPath {
|
||||
assert (aggregator instanceof SingleBucketAggregator )
|
||||
|| (aggregator instanceof NumericMetricsAggregator) : "this should be picked up before aggregation execution - on validate";
|
||||
return aggregator;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates this path over the given aggregator as a point of reference.
|
||||
|
@ -58,7 +58,7 @@ import java.util.Collections;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class ESExceptionTests extends ESTestCase {
|
||||
private static final ToXContent.Params PARAMS = new ToXContent.MapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "true"));
|
||||
private static final ToXContent.Params PARAMS = ToXContent.EMPTY_PARAMS;
|
||||
|
||||
@Test
|
||||
public void testStatus() {
|
||||
|
@ -541,7 +541,7 @@ public class ExceptionSerializationTests extends ESTestCase {
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
x.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "true")));
|
||||
x.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
return builder.string();
|
||||
} catch (IOException e) {
|
||||
|
@ -33,13 +33,47 @@ import org.elasticsearch.indices.InvalidIndexTemplateException;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
|
||||
public class MetaDataIndexTemplateServiceTests extends ESTestCase {
|
||||
@Test
|
||||
public void testIndexTemplateInvalidNumberOfShards() throws IOException {
|
||||
public void testIndexTemplateInvalidNumberOfShards() {
|
||||
PutRequest request = new PutRequest("test", "test_shards");
|
||||
request.template("test_shards*");
|
||||
|
||||
Map<String, Object> map = Maps.newHashMap();
|
||||
map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0");
|
||||
request.settings(Settings.settingsBuilder().put(map).build());
|
||||
|
||||
List<Throwable> throwables = putTemplate(request);
|
||||
assertEquals(throwables.size(), 1);
|
||||
assertThat(throwables.get(0), instanceOf(InvalidIndexTemplateException.class));
|
||||
assertThat(throwables.get(0).getMessage(), containsString("index must have 1 or more primary shards"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIndexTemplateValidationAccumulatesValidationErrors() {
|
||||
PutRequest request = new PutRequest("test", "putTemplate shards");
|
||||
request.template("_test_shards*");
|
||||
|
||||
Map<String, Object> map = Maps.newHashMap();
|
||||
map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0");
|
||||
request.settings(Settings.settingsBuilder().put(map).build());
|
||||
|
||||
List<Throwable> throwables = putTemplate(request);
|
||||
assertEquals(throwables.size(), 1);
|
||||
assertThat(throwables.get(0), instanceOf(InvalidIndexTemplateException.class));
|
||||
assertThat(throwables.get(0).getMessage(), containsString("name must not contain a space"));
|
||||
assertThat(throwables.get(0).getMessage(), containsString("template must not start with '_'"));
|
||||
assertThat(throwables.get(0).getMessage(), containsString("index must have 1 or more primary shards"));
|
||||
}
|
||||
|
||||
private static List<Throwable> putTemplate(PutRequest request) {
|
||||
MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService(
|
||||
Settings.EMPTY,
|
||||
null,
|
||||
@ -55,13 +89,6 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase {
|
||||
);
|
||||
MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, null);
|
||||
|
||||
PutRequest request = new PutRequest("test", "test_shards");
|
||||
request.template("test_shards*");
|
||||
|
||||
Map<String, Object> map = Maps.newHashMap();
|
||||
map.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "0");
|
||||
request.settings(Settings.settingsBuilder().put(map).build());
|
||||
|
||||
final List<Throwable> throwables = Lists.newArrayList();
|
||||
service.putTemplate(request, new MetaDataIndexTemplateService.PutListener() {
|
||||
@Override
|
||||
@ -74,8 +101,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase {
|
||||
throwables.add(t);
|
||||
}
|
||||
});
|
||||
assertEquals(throwables.size(), 1);
|
||||
assertTrue(throwables.get(0) instanceof InvalidIndexTemplateException);
|
||||
assertTrue(throwables.get(0).getMessage().contains("index must have 1 or more primary shards"));
|
||||
|
||||
return throwables;
|
||||
}
|
||||
}
|
||||
|
@ -125,7 +125,7 @@ public class MultiSearchRequestTests extends ESTestCase {
|
||||
public void testResponseErrorToXContent() throws IOException {
|
||||
MultiSearchResponse response = new MultiSearchResponse(new MultiSearchResponse.Item[]{new MultiSearchResponse.Item(null, new IllegalStateException("foobar")), new MultiSearchResponse.Item(null, new IllegalStateException("baaaaaazzzz"))});
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
response.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "true")));
|
||||
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
assertEquals("\"responses\"[{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"}],\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"}},{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"}],\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"}}]",
|
||||
builder.string());
|
||||
}
|
||||
|
@ -20,6 +20,7 @@
|
||||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -27,6 +28,8 @@ import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.jar.Attributes;
|
||||
import java.util.jar.JarOutputStream;
|
||||
import java.util.jar.Manifest;
|
||||
@ -153,22 +156,25 @@ public class JarHellTests extends ESTestCase {
|
||||
|
||||
public void testRequiredJDKVersionTooOld() throws Exception {
|
||||
Path dir = createTempDir();
|
||||
String previousJavaVersion = System.getProperty("java.specification.version");
|
||||
System.setProperty("java.specification.version", "1.7");
|
||||
List<Integer> current = JavaVersion.current().getVersion();
|
||||
List<Integer> target = new ArrayList<>(current.size());
|
||||
for (int i = 0; i < current.size(); i++) {
|
||||
target.add(current.get(i) + 1);
|
||||
}
|
||||
JavaVersion targetVersion = JavaVersion.parse(Strings.collectionToDelimitedString(target, "."));
|
||||
|
||||
|
||||
Manifest manifest = new Manifest();
|
||||
Attributes attributes = manifest.getMainAttributes();
|
||||
attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
|
||||
attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "1.8");
|
||||
attributes.put(new Attributes.Name("X-Compile-Target-JDK"), targetVersion.toString());
|
||||
URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
|
||||
try {
|
||||
JarHell.checkJarHell(jars);
|
||||
fail("did not get expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertTrue(e.getMessage().contains("requires Java 1.8"));
|
||||
assertTrue(e.getMessage().contains("your system: 1.7"));
|
||||
} finally {
|
||||
System.setProperty("java.specification.version", previousJavaVersion);
|
||||
assertTrue(e.getMessage().contains("requires Java " + targetVersion.toString()));
|
||||
assertTrue(e.getMessage().contains("your system: " + JavaVersion.current().toString()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -213,7 +219,12 @@ public class JarHellTests extends ESTestCase {
|
||||
attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
|
||||
attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "bogus");
|
||||
URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
|
||||
JarHell.checkJarHell(jars);
|
||||
try {
|
||||
JarHell.checkJarHell(jars);
|
||||
fail("did not get expected exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertTrue(e.getMessage().equals("version string must be a sequence of nonnegative decimal integers separated by \".\"'s and may have leading zeros but was bogus"));
|
||||
}
|
||||
}
|
||||
|
||||
/** make sure if a plugin is compiled against the same ES version, it works */
|
||||
@ -242,4 +253,26 @@ public class JarHellTests extends ESTestCase {
|
||||
assertTrue(e.getMessage().contains("requires Elasticsearch 1.0-bogus"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testValidVersions() {
|
||||
String[] versions = new String[]{"1.7", "1.7.0", "0.1.7", "1.7.0.80"};
|
||||
for (String version : versions) {
|
||||
try {
|
||||
JarHell.checkVersionFormat(version);
|
||||
} catch (IllegalStateException e) {
|
||||
fail(version + " should be accepted as a valid version format");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testInvalidVersions() {
|
||||
String[] versions = new String[]{"", "1.7.0_80", "1.7."};
|
||||
for (String version : versions) {
|
||||
try {
|
||||
JarHell.checkVersionFormat(version);
|
||||
fail("\"" + version + "\"" + " should be rejected as an invalid version format");
|
||||
} catch (IllegalStateException e) {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.is;
|
||||
|
||||
public class JavaVersionTests extends ESTestCase {
|
||||
@Test
|
||||
public void testParse() {
|
||||
JavaVersion javaVersion = JavaVersion.parse("1.7.0");
|
||||
List<Integer> version = javaVersion.getVersion();
|
||||
assertThat(3, is(version.size()));
|
||||
assertThat(1, is(version.get(0)));
|
||||
assertThat(7, is(version.get(1)));
|
||||
assertThat(0, is(version.get(2)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testToString() {
|
||||
JavaVersion javaVersion = JavaVersion.parse("1.7.0");
|
||||
assertThat("1.7.0", is(javaVersion.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCompare() {
|
||||
JavaVersion onePointSix = JavaVersion.parse("1.6");
|
||||
JavaVersion onePointSeven = JavaVersion.parse("1.7");
|
||||
JavaVersion onePointSevenPointZero = JavaVersion.parse("1.7.0");
|
||||
JavaVersion onePointSevenPointOne = JavaVersion.parse("1.7.1");
|
||||
JavaVersion onePointSevenPointTwo = JavaVersion.parse("1.7.2");
|
||||
JavaVersion onePointSevenPointOnePointOne = JavaVersion.parse("1.7.1.1");
|
||||
JavaVersion onePointSevenPointTwoPointOne = JavaVersion.parse("1.7.2.1");
|
||||
|
||||
assertTrue(onePointSix.compareTo(onePointSeven) < 0);
|
||||
assertTrue(onePointSeven.compareTo(onePointSix) > 0);
|
||||
assertTrue(onePointSix.compareTo(onePointSix) == 0);
|
||||
assertTrue(onePointSeven.compareTo(onePointSevenPointZero) == 0);
|
||||
assertTrue(onePointSevenPointOnePointOne.compareTo(onePointSevenPointOne) > 0);
|
||||
assertTrue(onePointSevenPointTwo.compareTo(onePointSevenPointTwoPointOne) < 0);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValidVersions() {
|
||||
String[] versions = new String[]{"1.7", "1.7.0", "0.1.7", "1.7.0.80"};
|
||||
for (String version : versions) {
|
||||
assertTrue(JavaVersion.isValid(version));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInvalidVersions() {
|
||||
String[] versions = new String[]{"", "1.7.0_80", "1.7."};
|
||||
for (String version : versions) {
|
||||
assertFalse(JavaVersion.isValid(version));
|
||||
}
|
||||
}
|
||||
}
|
@ -19,12 +19,18 @@
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.ExpectedException;
|
||||
|
||||
import static org.elasticsearch.test.VersionUtils.getFirstVersion;
|
||||
import static org.elasticsearch.test.VersionUtils.getPreviousVersion;
|
||||
import static org.elasticsearch.test.VersionUtils.randomVersionBetween;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.is;
|
||||
import static org.hamcrest.Matchers.hasToString;
|
||||
|
||||
public class MapperServiceTest extends ESSingleNodeTestCase {
|
||||
@ -46,4 +52,39 @@ public class MapperServiceTest extends ESSingleNodeTestCase {
|
||||
.execute()
|
||||
.actionGet();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testThatLongTypeNameIsNotRejectedOnPreElasticsearchVersionTwo() {
|
||||
String index = "text-index";
|
||||
String field = "field";
|
||||
String type = new String(new char[256]).replace("\0", "a");
|
||||
|
||||
CreateIndexResponse response =
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareCreate(index)
|
||||
.setSettings(settings(randomVersionBetween(random(), getFirstVersion(), getPreviousVersion(Version.V_2_0_0_beta1))))
|
||||
.addMapping(type, field, "type=string")
|
||||
.execute()
|
||||
.actionGet();
|
||||
assertNotNull(response);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTypeNameTooLong() {
|
||||
String index = "text-index";
|
||||
String field = "field";
|
||||
String type = new String(new char[256]).replace("\0", "a");
|
||||
|
||||
expectedException.expect(MapperParsingException.class);
|
||||
expectedException.expect(hasToString(containsString("mapping type name [" + type + "] is too long; limit is length 255 but was [256]")));
|
||||
client()
|
||||
.admin()
|
||||
.indices()
|
||||
.prepareCreate(index)
|
||||
.addMapping(type, field, "type=string")
|
||||
.execute()
|
||||
.actionGet();
|
||||
}
|
||||
}
|
||||
|
@ -32,6 +32,7 @@ import org.elasticsearch.test.rest.client.http.HttpResponse;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
|
||||
/**
|
||||
* Tests that by default the error_trace parameter can be used to show stacktraces
|
||||
@ -59,6 +60,16 @@ public class DetailedErrorsEnabledIT extends ESIntegTestCase {
|
||||
.execute();
|
||||
|
||||
assertThat(response.getHeaders().get("Content-Type"), containsString("application/json"));
|
||||
assertThat(response.getBody(), containsString("\"error_trace\":{\"message\":\"Validation Failed"));
|
||||
assertThat(response.getBody(), containsString("\"stack_trace\":\"[Validation Failed: 1: index / indices is missing;]; nested: ActionRequestValidationException[Validation Failed: 1:"));
|
||||
|
||||
// Make the HTTP request
|
||||
response = new HttpRequestBuilder(HttpClients.createDefault())
|
||||
.httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class))
|
||||
.path("/")
|
||||
.method(HttpDeleteWithEntity.METHOD_NAME)
|
||||
.execute();
|
||||
|
||||
assertThat(response.getHeaders().get("Content-Type"), containsString("application/json"));
|
||||
assertThat(response.getBody(), not(containsString("\"stack_trace\":\"[Validation Failed: 1: index / indices is missing;]; nested: ActionRequestValidationException[Validation Failed: 1:")));
|
||||
}
|
||||
}
|
||||
|
@ -176,6 +176,25 @@ public class PluginInfoTests extends ESTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
public void testReadFromPropertiesBadJavaVersionFormat() throws Exception {
|
||||
String pluginName = "fake-plugin";
|
||||
Path pluginDir = createTempDir().resolve(pluginName);
|
||||
writeProperties(pluginDir,
|
||||
"description", "fake desc",
|
||||
"name", pluginName,
|
||||
"elasticsearch.version", Version.CURRENT.toString(),
|
||||
"java.version", "1.7.0_80",
|
||||
"classname", "FakePlugin",
|
||||
"version", "1.0",
|
||||
"jvm", "true");
|
||||
try {
|
||||
PluginInfo.readFromProperties(pluginDir);
|
||||
fail("expected bad java version format exception");
|
||||
} catch (IllegalStateException e) {
|
||||
assertTrue(e.getMessage(), e.getMessage().equals("version string must be a sequence of nonnegative decimal integers separated by \".\"'s and may have leading zeros but was 1.7.0_80"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testReadFromPropertiesBogusElasticsearchVersion() throws Exception {
|
||||
Path pluginDir = createTempDir().resolve("fake-plugin");
|
||||
writeProperties(pluginDir,
|
||||
|
@ -563,6 +563,7 @@ public class PluginManagerIT extends ESIntegTestCase {
|
||||
PluginManager.checkForOfficialPlugins("lang-python");
|
||||
PluginManager.checkForOfficialPlugins("mapper-murmur3");
|
||||
PluginManager.checkForOfficialPlugins("mapper-size");
|
||||
PluginManager.checkForOfficialPlugins("discovery-multicast");
|
||||
|
||||
try {
|
||||
PluginManager.checkForOfficialPlugins("elasticsearch-mapper-attachment");
|
||||
|
@ -105,8 +105,8 @@ public class BytesRestResponseTests extends ESTestCase {
|
||||
BytesRestResponse response = new BytesRestResponse(channel, t);
|
||||
String text = response.content().toUtf8();
|
||||
assertThat(text, containsString("\"type\":\"throwable\",\"reason\":\"an error occurred reading data\""));
|
||||
assertThat(text, containsString("{\"type\":\"file_not_found_exception\",\"reason\":\"/foo/bar\"}"));
|
||||
assertThat(text, containsString("\"error_trace\":{\"message\":\"an error occurred reading data\""));
|
||||
assertThat(text, containsString("{\"type\":\"file_not_found_exception\""));
|
||||
assertThat(text, containsString("\"stack_trace\":\"[an error occurred reading data]"));
|
||||
}
|
||||
|
||||
public void testGuessRootCause() throws IOException {
|
||||
@ -176,7 +176,6 @@ public class BytesRestResponseTests extends ESTestCase {
|
||||
|
||||
DetailedExceptionRestChannel(RestRequest request) {
|
||||
super(request, true);
|
||||
request.params().put(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "true");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -22,11 +22,13 @@ import com.google.common.base.Strings;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchPhaseExecutionException;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
|
||||
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.aggregations.AggregationExecutionException;
|
||||
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
|
||||
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
|
||||
@ -388,7 +390,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
|
||||
assertThat(bucket.getDocCount(), equalTo(1l));
|
||||
}
|
||||
|
||||
// Check case with only exact term exclude clauses
|
||||
// Check case with only exact term exclude clauses
|
||||
response = client()
|
||||
.prepareSearch("idx")
|
||||
.setTypes("high_card_type")
|
||||
@ -690,11 +692,11 @@ public class StringTermsIT extends AbstractTermsTestCase {
|
||||
}
|
||||
|
||||
/*
|
||||
*
|
||||
*
|
||||
* [foo_val0, foo_val1] [foo_val1, foo_val2] [foo_val2, foo_val3] [foo_val3,
|
||||
* foo_val4] [foo_val4, foo_val5]
|
||||
*
|
||||
*
|
||||
*
|
||||
*
|
||||
* foo_val0 - doc_count: 1 - val_count: 2 foo_val1 - doc_count: 2 -
|
||||
* val_count: 4 foo_val2 - doc_count: 2 - val_count: 4 foo_val3 - doc_count:
|
||||
* 2 - val_count: 4 foo_val4 - doc_count: 2 - val_count: 4 foo_val5 -
|
||||
@ -995,6 +997,36 @@ public class StringTermsIT extends AbstractTermsTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void singleValuedField_OrderedByIllegalAgg() throws Exception {
|
||||
boolean asc = true;
|
||||
try {
|
||||
client()
|
||||
.prepareSearch("idx")
|
||||
.setTypes("type")
|
||||
.addAggregation(
|
||||
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.order(Terms.Order.aggregation("inner_terms>avg", asc))
|
||||
.subAggregation(terms("inner_terms").field(MULTI_VALUED_FIELD_NAME).subAggregation(avg("avg").field("i"))))
|
||||
.execute().actionGet();
|
||||
fail("Expected an exception");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
ElasticsearchException[] rootCauses = e.guessRootCauses();
|
||||
if (rootCauses.length == 1) {
|
||||
ElasticsearchException rootCause = rootCauses[0];
|
||||
if (rootCause instanceof AggregationExecutionException) {
|
||||
AggregationExecutionException aggException = (AggregationExecutionException) rootCause;
|
||||
assertThat(aggException.getMessage(), Matchers.startsWith("Invalid terms aggregation order path"));
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void singleValuedField_OrderedBySingleBucketSubAggregationAsc() throws Exception {
|
||||
boolean asc = randomBoolean();
|
||||
|
@ -64,10 +64,16 @@ public class VersionUtils {
|
||||
public static List<Version> allVersions() {
|
||||
return Collections.unmodifiableList(SORTED_VERSIONS);
|
||||
}
|
||||
|
||||
|
||||
public static Version getPreviousVersion(Version version) {
|
||||
int index = SORTED_VERSIONS.indexOf(version);
|
||||
assert index > 0;
|
||||
return SORTED_VERSIONS.get(index - 1);
|
||||
}
|
||||
|
||||
/** Returns the {@link Version} before the {@link Version#CURRENT} */
|
||||
public static Version getPreviousVersion() {
|
||||
Version version = SORTED_VERSIONS.get(SORTED_VERSIONS.size() - 2);
|
||||
Version version = getPreviousVersion(Version.CURRENT);
|
||||
assert version.before(Version.CURRENT);
|
||||
return version;
|
||||
}
|
||||
|
@ -1,764 +0,0 @@
|
||||
# Licensed to Elasticsearch under one or more contributor
|
||||
# license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright
|
||||
# ownership. Elasticsearch licenses this file to you under
|
||||
# the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on
|
||||
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
# either express or implied. See the License for the specific
|
||||
# language governing permissions and limitations under the License.
|
||||
|
||||
import re
|
||||
import tempfile
|
||||
import shutil
|
||||
import os
|
||||
import datetime
|
||||
import json
|
||||
import time
|
||||
import sys
|
||||
import argparse
|
||||
import hmac
|
||||
import urllib
|
||||
import fnmatch
|
||||
import socket
|
||||
import urllib.request
|
||||
import subprocess
|
||||
|
||||
from functools import partial
|
||||
from http.client import HTTPConnection
|
||||
from http.client import HTTPSConnection
|
||||
|
||||
|
||||
"""
|
||||
This tool builds a release from the a given elasticsearch branch.
|
||||
In order to execute it go in the top level directory and run:
|
||||
$ python3 dev_tools/build_release.py --branch 0.90 --publish --remote origin
|
||||
|
||||
By default this script runs in 'dry' mode which essentially simulates a release. If the
|
||||
'--publish' option is set the actual release is done. The script takes over almost all
|
||||
steps necessary for a release from a high level point of view it does the following things:
|
||||
|
||||
- run prerequisit checks ie. check for Java 1.7 being presend or S3 credentials available as env variables
|
||||
- detect the version to release from the specified branch (--branch) or the current branch
|
||||
- creates a release branch & updates pom.xml and Version.java to point to a release version rather than a snapshot
|
||||
- builds the artifacts and runs smoke-tests on the build zip & tar.gz files
|
||||
- commits the new version and merges the release branch into the source branch
|
||||
- creates a tag and pushes the commit to the specified origin (--remote)
|
||||
- publishes the releases to Sonatype and S3
|
||||
|
||||
Once it's done it will print all the remaining steps.
|
||||
|
||||
Prerequisites:
|
||||
- Python 3k for script execution
|
||||
- Boto for S3 Upload ($ apt-get install python-boto)
|
||||
- RPM for RPM building ($ apt-get install rpm)
|
||||
- S3 keys exported via ENV variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
|
||||
- GPG data exported via ENV variables (GPG_KEY_ID, GPG_PASSPHRASE, optionally GPG_KEYRING)
|
||||
- S3 target repository via ENV variables (S3_BUCKET_SYNC_TO, optionally S3_BUCKET_SYNC_FROM)
|
||||
"""
|
||||
env = os.environ
|
||||
|
||||
PLUGINS = [('license', 'elasticsearch/license/latest'),
|
||||
('bigdesk', 'lukas-vlcek/bigdesk'),
|
||||
('paramedic', 'karmi/elasticsearch-paramedic'),
|
||||
('segmentspy', 'polyfractal/elasticsearch-segmentspy'),
|
||||
('inquisitor', 'polyfractal/elasticsearch-inquisitor'),
|
||||
('head', 'mobz/elasticsearch-head')]
|
||||
|
||||
LOG = env.get('ES_RELEASE_LOG', '/tmp/elasticsearch_release.log')
|
||||
|
||||
# console colors
|
||||
COLOR_OK = '\033[92m'
|
||||
COLOR_END = '\033[0m'
|
||||
COLOR_FAIL = '\033[91m'
|
||||
|
||||
def log(msg):
|
||||
log_plain('\n%s' % msg)
|
||||
|
||||
def log_plain(msg):
|
||||
f = open(LOG, mode='ab')
|
||||
f.write(msg.encode('utf-8'))
|
||||
f.close()
|
||||
|
||||
def run(command, quiet=False):
|
||||
log('%s: RUN: %s\n' % (datetime.datetime.now(), command))
|
||||
if os.system('%s >> %s 2>&1' % (command, LOG)):
|
||||
msg = ' FAILED: %s [see log %s]' % (command, LOG)
|
||||
if not quiet:
|
||||
print(msg)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
try:
|
||||
JAVA_HOME = env['JAVA_HOME']
|
||||
except KeyError:
|
||||
raise RuntimeError("""
|
||||
Please set JAVA_HOME in the env before running release tool
|
||||
On OSX use: export JAVA_HOME=`/usr/libexec/java_home -v '1.7*'`""")
|
||||
|
||||
try:
|
||||
JAVA_HOME = env['JAVA7_HOME']
|
||||
except KeyError:
|
||||
pass #no JAVA7_HOME - we rely on JAVA_HOME
|
||||
|
||||
|
||||
try:
|
||||
# make sure mvn3 is used if mvn3 is available
|
||||
# some systems use maven 2 as default
|
||||
subprocess.check_output('mvn3 --version', shell=True, stderr=subprocess.STDOUT)
|
||||
MVN = 'mvn3'
|
||||
except subprocess.CalledProcessError:
|
||||
MVN = 'mvn'
|
||||
|
||||
def java_exe():
|
||||
path = JAVA_HOME
|
||||
return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (path, path, path)
|
||||
|
||||
def verify_java_version(version):
|
||||
s = os.popen('%s; java -version 2>&1' % java_exe()).read()
|
||||
if ' version "%s.' % version not in s:
|
||||
raise RuntimeError('got wrong version for java %s:\n%s' % (version, s))
|
||||
|
||||
# Verifies the java version. We guarantee that we run with Java 1.7
|
||||
# If 1.7 is not available fail the build!
|
||||
def verify_mvn_java_version(version, mvn):
|
||||
s = os.popen('%s; %s --version 2>&1' % (java_exe(), mvn)).read()
|
||||
if 'Java version: %s' % version not in s:
|
||||
raise RuntimeError('got wrong java version for %s %s:\n%s' % (mvn, version, s))
|
||||
|
||||
# Returns the hash of the current git HEAD revision
|
||||
def get_head_hash():
|
||||
return os.popen(' git rev-parse --verify HEAD 2>&1').read().strip()
|
||||
|
||||
# Returns the hash of the given tag revision
|
||||
def get_tag_hash(tag):
|
||||
return os.popen('git show-ref --tags %s --hash 2>&1' % (tag)).read().strip()
|
||||
|
||||
# Returns the name of the current branch
|
||||
def get_current_branch():
|
||||
return os.popen('git rev-parse --abbrev-ref HEAD 2>&1').read().strip()
|
||||
|
||||
# Utility that returns the name of the release branch for a given version
|
||||
def release_branch(version):
|
||||
return 'release_branch_%s' % version
|
||||
|
||||
# runs get fetch on the given remote
|
||||
def fetch(remote):
|
||||
run('git fetch %s' % remote)
|
||||
|
||||
# Creates a new release branch from the given source branch
|
||||
# and rebases the source branch from the remote before creating
|
||||
# the release branch. Note: This fails if the source branch
|
||||
# doesn't exist on the provided remote.
|
||||
def create_release_branch(remote, src_branch, release):
|
||||
run('git checkout %s' % src_branch)
|
||||
run('git pull --rebase %s %s' % (remote, src_branch))
|
||||
run('git checkout -b %s' % (release_branch(release)))
|
||||
|
||||
|
||||
# Reads the given file and applies the
|
||||
# callback to it. If the callback changed
|
||||
# a line the given file is replaced with
|
||||
# the modified input.
|
||||
def process_file(file_path, line_callback):
|
||||
fh, abs_path = tempfile.mkstemp()
|
||||
modified = False
|
||||
with open(abs_path,'w', encoding='utf-8') as new_file:
|
||||
with open(file_path, encoding='utf-8') as old_file:
|
||||
for line in old_file:
|
||||
new_line = line_callback(line)
|
||||
modified = modified or (new_line != line)
|
||||
new_file.write(new_line)
|
||||
os.close(fh)
|
||||
if modified:
|
||||
#Remove original file
|
||||
os.remove(file_path)
|
||||
#Move new file
|
||||
shutil.move(abs_path, file_path)
|
||||
return True
|
||||
else:
|
||||
# nothing to do - just remove the tmp file
|
||||
os.remove(abs_path)
|
||||
return False
|
||||
|
||||
# Walks the given directory path (defaults to 'docs')
|
||||
# and replaces all 'coming[$version]' tags with
|
||||
# 'added[$version]'. This method only accesses asciidoc files.
|
||||
def update_reference_docs(release_version, path='docs'):
|
||||
pattern = 'coming[%s' % (release_version)
|
||||
replacement = 'added[%s' % (release_version)
|
||||
pending_files = []
|
||||
def callback(line):
|
||||
return line.replace(pattern, replacement)
|
||||
for root, _, file_names in os.walk(path):
|
||||
for file_name in fnmatch.filter(file_names, '*.asciidoc'):
|
||||
full_path = os.path.join(root, file_name)
|
||||
if process_file(full_path, callback):
|
||||
pending_files.append(os.path.join(root, file_name))
|
||||
return pending_files
|
||||
|
||||
# Moves the pom.xml file from a snapshot to a release
|
||||
def remove_maven_snapshot(pom, release):
|
||||
pattern = '<version>%s-SNAPSHOT</version>' % (release)
|
||||
replacement = '<version>%s</version>' % (release)
|
||||
def callback(line):
|
||||
return line.replace(pattern, replacement)
|
||||
process_file(pom, callback)
|
||||
|
||||
# Moves the Version.java file from a snapshot to a release
|
||||
def remove_version_snapshot(version_file, release):
|
||||
# 1.0.0.Beta1 -> 1_0_0_Beta1
|
||||
release = release.replace('.', '_')
|
||||
pattern = 'new Version(V_%s_ID, true' % (release)
|
||||
replacement = 'new Version(V_%s_ID, false' % (release)
|
||||
def callback(line):
|
||||
return line.replace(pattern, replacement)
|
||||
process_file(version_file, callback)
|
||||
|
||||
# Stages the given files for the next git commit
|
||||
def add_pending_files(*files):
|
||||
for file in files:
|
||||
run('git add %s' % (file))
|
||||
|
||||
# Executes a git commit with 'release [version]' as the commit message
|
||||
def commit_release(release):
|
||||
run('git commit -m "release [%s]"' % release)
|
||||
|
||||
def commit_feature_flags(release):
|
||||
run('git commit -m "Update Documentation Feature Flags [%s]"' % release)
|
||||
|
||||
def tag_release(release):
|
||||
run('git tag -a v%s -m "Tag release version %s"' % (release, release))
|
||||
|
||||
def run_mvn(*cmd):
|
||||
for c in cmd:
|
||||
run('%s; %s %s' % (java_exe(), MVN, c))
|
||||
|
||||
def build_release(release_version, run_tests=False, dry_run=True, cpus=1, bwc_version=None):
|
||||
target = 'deploy'
|
||||
if dry_run:
|
||||
target = 'package'
|
||||
if run_tests:
|
||||
run_mvn('clean',
|
||||
'test -Dtests.jvms=%s -Des.node.mode=local' % (cpus),
|
||||
'test -Dtests.jvms=%s -Des.node.mode=network' % (cpus))
|
||||
if bwc_version:
|
||||
print('Running Backwards compatibility tests against version [%s]' % (bwc_version))
|
||||
run_mvn('clean', 'test -Dtests.filter=@backwards -Dtests.bwc.version=%s -Dtests.bwc=true -Dtests.jvms=1' % bwc_version)
|
||||
run_mvn('clean test-compile -Dforbidden.test.signatures="org.apache.lucene.util.LuceneTestCase\$AwaitsFix @ Please fix all bugs before release"')
|
||||
# dont sign the RPM, so older distros will be able to use the uploaded RPM package
|
||||
gpg_args = '-Dgpg.key="%s" -Dgpg.passphrase="%s" -Ddeb.sign=true -Drpm.sign=false' % (env.get('GPG_KEY_ID'), env.get('GPG_PASSPHRASE'))
|
||||
if env.get('GPG_KEYRING'):
|
||||
gpg_args += ' -Dgpg.keyring="%s"' % env.get('GPG_KEYRING')
|
||||
run_mvn('clean %s -DskipTests %s' % (target, gpg_args))
|
||||
success = False
|
||||
try:
|
||||
# create additional signed RPM for the repositories
|
||||
run_mvn('-f distribution/rpm/pom.xml package -DskipTests -Dsign.rpm=true -Drpm.outputDirectory=target/releases/signed/ %s' % (gpg_args))
|
||||
rpm = os.path.join('target/releases/signed', 'elasticsearch-%s.rpm' % release_version)
|
||||
if os.path.isfile(rpm):
|
||||
log('Signed RPM [%s] contains: ' % rpm)
|
||||
run('rpm -pqli %s' % rpm)
|
||||
success = True
|
||||
finally:
|
||||
if not success:
|
||||
print("""
|
||||
RPM Bulding failed make sure "rpm" tools are installed.
|
||||
Use on of the following commands to install:
|
||||
$ brew install rpm # on OSX
|
||||
$ apt-get install rpm # on Ubuntu et.al
|
||||
""")
|
||||
|
||||
# Uses the github API to fetch open tickets for the given release version
|
||||
# if it finds any tickets open for that version it will throw an exception
|
||||
def ensure_no_open_tickets(version):
|
||||
version = "v%s" % version
|
||||
conn = HTTPSConnection('api.github.com')
|
||||
try:
|
||||
log('Checking for open tickets on Github for version %s' % version)
|
||||
log('Check if node is available')
|
||||
conn.request('GET', '/repos/elastic/elasticsearch/issues?state=open&labels=%s' % version, headers= {'User-Agent' : 'Elasticsearch version checker'})
|
||||
res = conn.getresponse()
|
||||
if res.status == 200:
|
||||
issues = json.loads(res.read().decode("utf-8"))
|
||||
if issues:
|
||||
urls = []
|
||||
for issue in issues:
|
||||
urls.append(issue['html_url'])
|
||||
raise RuntimeError('Found open issues for release version %s:\n%s' % (version, '\n'.join(urls)))
|
||||
else:
|
||||
log("No open issues found for version %s" % version)
|
||||
else:
|
||||
raise RuntimeError('Failed to fetch issue list from Github for release version %s' % version)
|
||||
except socket.error as e:
|
||||
log("Failed to fetch issue list from Github for release version %s' % version - Exception: [%s]" % (version, e))
|
||||
#that is ok it might not be there yet
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def wait_for_node_startup(host='127.0.0.1', port=9200,timeout=15):
    """Poll the node's HTTP endpoint once per second until it answers 200.

    Returns True as soon as the node responds with HTTP 200, False after
    timeout attempts without success.
    """
    for _ in range(timeout):
        connection = HTTPConnection(host, port, timeout)
        try:
            log('Waiting until node becomes available for 1 second')
            time.sleep(1)
            log('Check if node is available')
            connection.request('GET', '')
            response = connection.getresponse()
            if response.status == 200:
                return True
        except socket.error as e:
            log("Failed while waiting for node - Exception: [%s]" % e)
            # that is ok it might not be there yet
        finally:
            connection.close()
    return False
|
||||
|
||||
# Ensures we are using a true Lucene release, not a snapshot build:
def verify_lucene_version():
    """Read pom.xml and fail if it still references a Lucene snapshot repo or version."""
    pom = open('pom.xml', encoding='utf-8').read()
    if 'download.elastic.co/lucenesnapshots' in pom:
        raise RuntimeError('pom.xml contains download.elastic.co/lucenesnapshots repository: remove that before releasing')

    match = re.search(r'<lucene.version>(.*?)</lucene.version>', pom)
    if match is None:
        raise RuntimeError('unable to locate lucene.version in pom.xml')
    lucene_version = match.group(1)

    match = re.search(r'<lucene.maven.version>(.*?)</lucene.maven.version>', pom)
    if match is None:
        raise RuntimeError('unable to locate lucene.maven.version in pom.xml')
    lucene_maven_version = match.group(1)
    # the two properties diverge only while a snapshot build of lucene is in use
    if lucene_version != lucene_maven_version:
        raise RuntimeError('pom.xml is still using a snapshot release of lucene (%s): cutover to a real lucene release before releasing' % lucene_maven_version)
|
||||
|
||||
# Checks the pom.xml for the release version.
# This method fails if the pom file has no SNAPSHOT version set ie.
# if the version is already on a release version we fail.
# Returns the next version string ie. 0.90.7
def find_release_version(src_branch):
    """Check out src_branch and return the pom.xml version with the -SNAPSHOT suffix stripped."""
    run('git checkout %s' % src_branch)
    with open('pom.xml', encoding='utf-8') as pom:
        for pom_line in pom:
            snapshot = re.search(r'<version>(.+)-SNAPSHOT</version>', pom_line)
            if snapshot:
                return snapshot.group(1)
    raise RuntimeError('Could not find release version in branch %s' % src_branch)
|
||||
|
||||
def artifact_names(release):
    """Return the expected distribution artifact paths (zip, tar.gz, deb, rpm) for a release."""
    layout = [
        ('distribution/zip/target/releases', 'elasticsearch-%s.zip'),
        ('distribution/tar/target/releases', 'elasticsearch-%s.tar.gz'),
        ('distribution/deb/target/releases', 'elasticsearch-%s.deb'),
        ('distribution/rpm/target/releases', 'elasticsearch-%s.rpm'),
    ]
    return [os.path.join(directory, pattern % (release)) for directory, pattern in layout]
|
||||
|
||||
def get_artifacts(release):
    """Return the artifact paths for a release, failing fast on the first missing file."""
    common_artifacts = artifact_names(release)
    for artifact in common_artifacts:
        if os.path.isfile(artifact):
            continue
        raise RuntimeError('Could not find required artifact at %s' % artifact)
    return common_artifacts
|
||||
|
||||
# Sample URL:
# http://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/elasticsearch-rpm/2.0.0-beta1-SNAPSHOT/elasticsearch-rpm-2.0.0-beta1-SNAPSHOT.rpm
def download_and_verify(release, files, plugins=None, base_url='https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution'):
    """Download the release artifacts, verify their sha1 checksums and smoke test them.

    files: local artifact paths whose basenames determine the remote URLs.
    Downloads into a temp dir that is always removed afterwards.
    """
    print('Downloading and verifying release %s from %s' % (release, base_url))
    tmp_dir = tempfile.mkdtemp()
    try:
        downloaded_files = []
        for file in files:
            name = os.path.basename(file)
            if name.endswith('tar.gz'):
                url = '%s/tar/elasticsearch/%s/%s' % (base_url, release, name)
            elif name.endswith('zip'):
                url = '%s/zip/elasticsearch/%s/%s' % (base_url, release, name)
            elif name.endswith('rpm'):
                url = '%s/rpm/elasticsearch/%s/%s' % (base_url, release, name)
            elif name.endswith('deb'):
                url = '%s/deb/elasticsearch/%s/%s' % (base_url, release, name)
            else:
                # fixed: an unrecognized extension previously fell through and either
                # raised NameError (first iteration) or silently reused the previous url
                raise RuntimeError('Unknown artifact extension for %s' % name)
            abs_file_path = os.path.join(tmp_dir, name)
            print(' Downloading %s' % (url))
            downloaded_files.append(abs_file_path)
            urllib.request.urlretrieve(url, abs_file_path)
            # fetch the published .sha1 next to the artifact and verify it
            url = ''.join([url, '.sha1'])
            checksum_file = os.path.join(tmp_dir, ''.join([abs_file_path, '.sha1']))
            urllib.request.urlretrieve(url, checksum_file)
            print(' Verifying checksum %s' % (checksum_file))
            run('cd %s && sha1sum -c %s' % (tmp_dir, os.path.basename(checksum_file)))
        smoke_test_release(release, downloaded_files, get_tag_hash('v%s' % release), plugins)
        print(' SUCCESS')
    finally:
        shutil.rmtree(tmp_dir)
|
||||
|
||||
def smoke_test_release(release, files, expected_hash, plugins):
    # Smoke tests each distribution package: extracts it, installs the given
    # plugins, starts a node in the background and verifies version number,
    # build hash and installed plugins via the HTTP API, then runs the REST
    # spec tests against the node. Only zip and tar.gz packages are tested;
    # rpm/deb are skipped below.
    #
    # release: version string the started node must report
    # files: list of local artifact paths
    # expected_hash: git hash the node's build_hash must match
    # plugins: iterable of (name, plugin-spec) pairs to install before startup
    for release_file in files:
        if not os.path.isfile(release_file):
            raise RuntimeError('Smoketest failed missing file %s' % (release_file))
        tmp_dir = tempfile.mkdtemp()
        if release_file.endswith('tar.gz'):
            run('tar -xzf %s -C %s' % (release_file, tmp_dir))
        elif release_file.endswith('zip'):
            run('unzip %s -d %s' % (release_file, tmp_dir))
        else:
            # rpm/deb cannot be unpacked/started here
            log('Skip SmokeTest for [%s]' % release_file)
            continue # nothing to do here
        es_run_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/elasticsearch')
        print(' Smoke testing package [%s]' % release_file)
        es_plugin_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release),'bin/plugin')
        # track installed plugins so we can verify the node reports all of them
        plugin_names = {}
        for name, plugin in plugins:
            print(' Install plugin [%s] from [%s]' % (name, plugin))
            run('%s; %s install %s' % (java_exe(), es_plugin_path, plugin))
            plugin_names[name] = True

        background = '-d'  # run the node as a daemon
        print(' Starting elasticsearch deamon from [%s]' % os.path.join(tmp_dir, 'elasticsearch-%s' % release))
        run('%s; %s -Des.node.name=smoke_tester -Des.cluster.name=prepare_release -Des.script.inline=on -Des.script.indexed=on %s'
            % (java_exe(), es_run_path, background))
        conn = HTTPConnection('127.0.0.1', 9200, 20);
        wait_for_node_startup()
        try:
            try:
                conn.request('GET', '')
                res = conn.getresponse()
                if res.status == 200:
                    version = json.loads(res.read().decode("utf-8"))['version']
                    if release != version['number']:
                        raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number']))
                    if version['build_snapshot']:
                        raise RuntimeError('Expected non snapshot version')
                    if version['build_hash'].strip() != expected_hash:
                        raise RuntimeError('HEAD hash does not match expected [%s] but got [%s]' % (expected_hash, version['build_hash']))
                    print(' Running REST Spec tests against package [%s]' % release_file)
                    run_mvn('test -Dtests.cluster=%s -Dtests.jvms=1 -Dtests.class=*.*RestTests' % ("127.0.0.1:9300"))
                    print(' Verify if plugins are listed in _nodes')
                    conn.request('GET', '/_nodes?plugin=true&pretty=true')
                    res = conn.getresponse()
                    if res.status == 200:
                        nodes = json.loads(res.read().decode("utf-8"))['nodes']
                        for _, node in nodes.items():
                            node_plugins = node['plugins']
                            for node_plugin in node_plugins:
                                if not plugin_names.get(node_plugin['name'], False):
                                    raise RuntimeError('Unexpeced plugin %s' % node_plugin['name'])
                                # remove each reported plugin; leftovers mean a plugin failed to load
                                del plugin_names[node_plugin['name']]
                        if plugin_names:
                            raise RuntimeError('Plugins not loaded %s' % list(plugin_names.keys()))

                    else:
                        raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
                else:
                    raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
            finally:
                # always shut the node down, even when a check above failed
                conn.request('POST', '/_cluster/nodes/_local/_shutdown')
                time.sleep(1) # give the node some time to shut down
                # NOTE(review): the error message interpolates the stale 'res' status,
                # not the shutdown response status -- confirm whether that is intended
                if conn.getresponse().status != 200:
                    raise RuntimeError('Expected HTTP 200 but got %s on node shutdown' % res.status)

        finally:
            conn.close()
            shutil.rmtree(tmp_dir)
|
||||
|
||||
def merge_tag_push(remote, src_branch, release_version, dry_run):
    """Merge the release branch back into src_branch, tag it, and push both unless dry_run."""
    run('git checkout %s' % src_branch)
    run('git merge %s' % release_branch(release_version))
    run('git tag v%s' % release_version)
    if dry_run:
        print(' dryrun [True] -- skipping push to remote %s' % remote)
    else:
        run('git push %s %s' % (remote, src_branch)) # push the commit
        run('git push %s v%s' % (remote, release_version)) # push the tag
|
||||
def publish_repositories(version, dry_run=True):
    """Trigger the deb/rpm package repository update via dev-tools/build_repositories.sh.

    version: the source branch name (e.g. '1.5', '2.0') passed through to the script;
    the caller passes its src_branch here. No-op when dry_run.
    """
    if dry_run:
        print('Skipping package repository update')
    else:
        # fixed: the original body ignored the 'version' parameter and read the
        # module-level 'src_branch' global; the caller passes src_branch as
        # 'version', so using the parameter is equivalent without the hidden coupling
        print('Triggering repository update for version %s - calling dev-tools/build_repositories.sh %s' % (version, version))
        # version is a branch name like 1.5/1.6/2.0/etc.. so we can use this
        run('dev-tools/build_repositories.sh %s' % version)
|
||||
|
||||
def print_sonatype_notice():
    """Print a setup notice unless ~/.m2/settings.xml already holds sonatype credentials."""
    settings = os.path.join(os.path.expanduser('~'), '.m2/settings.xml')
    if os.path.isfile(settings):
        with open(settings, encoding='utf-8') as settings_file:
            for line in settings_file:
                if line.strip() != '<id>sonatype-nexus-snapshots</id>':
                    continue
                # found the indicator -- credentials are configured, skip the warning
                return
    print("""
NOTE: No sonatype settings detected, make sure you have configured
your sonatype credentials in '~/.m2/settings.xml':

<settings>
...
<servers>
<server>
<id>sonatype-nexus-snapshots</id>
<username>your-jira-id</username>
<password>your-jira-pwd</password>
</server>
<server>
<id>sonatype-nexus-staging</id>
<username>your-jira-id</username>
<password>your-jira-pwd</password>
</server>
</servers>
...
</settings>
""")
|
||||
|
||||
def check_command_exists(name, cmd):
    """Probe for a command-line tool by running cmd; raise RuntimeError when it fails."""
    try:
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return
    except subprocess.CalledProcessError:
        pass
    raise RuntimeError('Could not run command %s - please make sure it is installed' % (name))
|
||||
|
||||
# Path to the Version.java whose snapshot flag is flipped during a release.
VERSION_FILE = 'src/main/java/org/elasticsearch/Version.java'
# Top-level maven pom the release version is read from.
POM_FILE = 'pom.xml'
|
||||
|
||||
# finds the highest available bwc version to test against
def find_bwc_version(release_version, bwc_dir='backwards'):
    """Scan bwc_dir for elasticsearch-X.Y.Z checkouts and return the highest version
    strictly below release_version, or None when none is available."""
    log(' Lookup bwc version in directory [%s]' % bwc_dir)
    bwc_version = None
    if os.path.exists(bwc_dir) and os.path.isdir(bwc_dir):
        max_version = [int(x) for x in release_version.split('.')]
        prefix = 'elasticsearch-'
        for entry in os.listdir(bwc_dir):
            if not (os.path.isdir(os.path.join(bwc_dir, entry)) and entry.startswith(prefix)):
                continue
            candidate = [int(x) for x in entry[len(prefix):].split('.')]
            if candidate < max_version: # bwc tests only against smaller versions
                if (not bwc_version) or candidate > [int(x) for x in bwc_version.split('.')]:
                    bwc_version = entry[len(prefix):]
        log(' Using bwc version [%s]' % bwc_version)
    else:
        log(' bwc directory [%s] does not exists or is not a directory - skipping' % bwc_dir)
    return bwc_version
|
||||
|
||||
def ensure_checkout_is_clean(branchName):
    """Fail unless the working copy is clean, on branchName, and in sync with origin.

    Raises RuntimeError describing the first violated condition.
    """
    # Make sure no local mods:
    # fixed: check_output returns bytes; without decoding, the error message would
    # interpolate as b'...' (the sibling git-status check below already decodes)
    s = subprocess.check_output('git diff --shortstat', shell=True).decode('utf-8', errors='replace')
    if len(s) > 0:
        raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)

    # Make sure no untracked files:
    s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
    if 'Untracked files:' in s:
        raise RuntimeError('git status shows untracked files: got:\n%s' % s)

    # Make sure we are on the right branch (NOTE: a bit weak, since we default to current branch):
    if 'On branch %s' % branchName not in s:
        raise RuntimeError('git status does not show branch %s: got:\n%s' % (branchName, s))

    # Make sure we have all changes from origin:
    if 'is behind' in s:
        raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin %s": got:\n%s' % (branchName, s))

    # Make sure we no local unpushed changes (this is supposed to be a clean area):
    if 'is ahead' in s:
        raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout %s", "git reset --hard origin/%s": got:\n%s' % (branchName, branchName, s))
|
||||
|
||||
# Checks all source files for //NORELEASE comments
def check_norelease(path='src'):
    """Raise RuntimeError if any *.java file under path contains a 'norelease' marker (case-insensitive)."""
    pattern = re.compile(r'\bnorelease\b', re.IGNORECASE)
    for root, _, file_names in os.walk(path):
        for file_name in fnmatch.filter(file_names, '*.java'):
            full_path = os.path.join(root, file_name)
            with open(full_path, 'r', encoding='utf-8') as current_file:
                for line_number, line in enumerate(current_file, start=1):
                    if pattern.search(line):
                        raise RuntimeError('Found //norelease comment in %s line %s' % (full_path, line_number))
|
||||
|
||||
def run_and_print(text, run_function):
    """Print text, invoke run_function, and report colored OK/NOT OK; True on success."""
    try:
        print(text, end='')
        run_function()
    except RuntimeError:
        print(COLOR_FAIL + 'NOT OK' + COLOR_END)
        return False
    print(COLOR_OK + 'OK' + COLOR_END)
    return True
|
||||
|
||||
def check_env_var(text, env_var):
    """Print text and check that env_var is present in the env mapping; True when set."""
    print(text, end='')
    if env_var in env:
        print(COLOR_OK + 'OK' + COLOR_END)
        return True
    print(COLOR_FAIL + 'NOT OK' + COLOR_END)
    return False
|
||||
|
||||
def check_environment_and_commandline_tools(check_only):
    """Verify all env vars and command-line tools needed for a release are present.

    Prints one OK/NOT OK line per check. Exits the process with status 0 in
    check_only (report-only) mode, and with status 1 when any check failed.
    """
    checks = list()
    # fixed: message previously said AWS_SECRET_ACCESS_KEY_ID while checking AWS_SECRET_ACCESS_KEY
    checks.append(check_env_var('Checking for AWS env configuration AWS_SECRET_ACCESS_KEY... ', 'AWS_SECRET_ACCESS_KEY'))
    checks.append(check_env_var('Checking for AWS env configuration AWS_ACCESS_KEY_ID... ', 'AWS_ACCESS_KEY_ID'))
    checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_USERNAME... ', 'SONATYPE_USERNAME'))
    checks.append(check_env_var('Checking for SONATYPE env configuration SONATYPE_PASSWORD... ', 'SONATYPE_PASSWORD'))
    checks.append(check_env_var('Checking for GPG env configuration GPG_KEY_ID... ', 'GPG_KEY_ID'))
    checks.append(check_env_var('Checking for GPG env configuration GPG_PASSPHRASE... ', 'GPG_PASSPHRASE'))
    checks.append(check_env_var('Checking for S3 repo upload env configuration S3_BUCKET_SYNC_TO... ', 'S3_BUCKET_SYNC_TO'))
    checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_NAME... ', 'GIT_AUTHOR_NAME'))
    checks.append(check_env_var('Checking for git env configuration GIT_AUTHOR_EMAIL... ', 'GIT_AUTHOR_EMAIL'))

    checks.append(run_and_print('Checking command: rpm... ', partial(check_command_exists, 'rpm', 'rpm --version')))
    checks.append(run_and_print('Checking command: dpkg... ', partial(check_command_exists, 'dpkg', 'dpkg --version')))
    checks.append(run_and_print('Checking command: gpg... ', partial(check_command_exists, 'gpg', 'gpg --version')))
    checks.append(run_and_print('Checking command: expect... ', partial(check_command_exists, 'expect', 'expect -v')))
    checks.append(run_and_print('Checking command: createrepo... ', partial(check_command_exists, 'createrepo', 'createrepo --version')))
    checks.append(run_and_print('Checking command: s3cmd... ', partial(check_command_exists, 's3cmd', 's3cmd --version')))
    checks.append(run_and_print('Checking command: apt-ftparchive... ', partial(check_command_exists, 'apt-ftparchive', 'apt-ftparchive --version')))

    # boto, check error code being returned
    location = os.path.dirname(os.path.realpath(__file__))
    command = 'python %s/upload-s3.py -h' % (location)
    checks.append(run_and_print('Testing boto python dependency... ', partial(check_command_exists, 'python-boto', command)))

    checks.append(run_and_print('Checking java version... ', partial(verify_java_version, '1.7')))
    checks.append(run_and_print('Checking java mvn version... ', partial(verify_mvn_java_version, '1.7', MVN)))

    if check_only:
        # report-only mode: exit after printing the report
        # NOTE(review): this exits 0 even when checks failed -- confirm that is intended
        sys.exit(0)

    if False in checks:
        print("Exiting due to failing checks")
        # fixed: previously exited with status 0, falsely signalling success to callers/CI
        sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
    # Top-level release driver: parses CLI options, validates the environment,
    # then either builds/tags/publishes a release from the given branch or only
    # smoke tests an already-published version (--smoke).
    parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release')
    parser.add_argument('--branch', '-b', metavar='RELEASE_BRANCH', default=get_current_branch(),
                        help='The branch to release from. Defaults to the current branch.')
    parser.add_argument('--cpus', '-c', metavar='1', default=1,
                        help='The number of cpus to use for running the test. Default is [1]')
    parser.add_argument('--skiptests', '-t', dest='tests', action='store_false',
                        help='Skips tests before release. Tests are run by default.')
    parser.set_defaults(tests=True)
    parser.add_argument('--remote', '-r', metavar='origin', default='origin',
                        help='The remote to push the release commit and tag to. Default is [origin]')
    parser.add_argument('--publish', '-d', dest='dryrun', action='store_false',
                        help='Publishes the release. Disable by default.')
    parser.add_argument('--smoke', '-s', dest='smoke', default='',
                        help='Smoke tests the given release')
    parser.add_argument('--bwc', '-w', dest='bwc', metavar='backwards', default='backwards',
                        help='Backwards compatibility version path to use to run compatibility tests against')
    parser.add_argument('--check-only', dest='check_only', action='store_true',
                        help='Checks and reports for all requirements and then exits')

    parser.set_defaults(dryrun=True)
    parser.set_defaults(smoke=None)
    parser.set_defaults(check_only=False)
    args = parser.parse_args()
    bwc_path = args.bwc
    src_branch = args.branch
    remote = args.remote
    run_tests = args.tests
    dry_run = args.dryrun
    cpus = args.cpus
    # --smoke implies "don't build, only verify the given version"
    build = not args.smoke
    smoke_test_version = args.smoke

    check_environment_and_commandline_tools(args.check_only)

    # we print a notice if we can not find the relevant infos in the ~/.m2/settings.xml
    print_sonatype_notice()

    # we require to build with 1.7
    verify_java_version('1.7')
    verify_mvn_java_version('1.7', MVN)

    if os.path.exists(LOG):
        raise RuntimeError('please remove old release log %s first' % LOG)

    if not dry_run:
        print('WARNING: dryrun is set to "false" - this will push and publish the release')
        input('Press Enter to continue...')

    print(''.join(['-' for _ in range(80)]))
    print('Preparing Release from branch [%s] running tests: [%s] dryrun: [%s]' % (src_branch, run_tests, dry_run))
    print(' JAVA_HOME is [%s]' % JAVA_HOME)
    print(' Running with maven command: [%s] ' % (MVN))
    if build:
        check_norelease(path='src')
        ensure_checkout_is_clean(src_branch)
        verify_lucene_version()
        release_version = find_release_version(src_branch)
        ensure_no_open_tickets(release_version)
        if not dry_run:
            smoke_test_version = release_version
        # remembered so a dry run can reset the branch to this commit afterwards
        head_hash = get_head_hash()
        run_mvn('clean') # clean the env!
        print(' Release version: [%s]' % release_version)
        create_release_branch(remote, src_branch, release_version)
        print(' Created release branch [%s]' % (release_branch(release_version)))
        success = False
        try:
            pending_files = [POM_FILE, VERSION_FILE]
            remove_maven_snapshot(POM_FILE, release_version)
            remove_version_snapshot(VERSION_FILE, release_version)
            print(' Done removing snapshot version')
            add_pending_files(*pending_files) # expects var args use * to expand
            commit_release(release_version)
            pending_files = update_reference_docs(release_version)
            version_head_hash = None
            # split commits for docs and version to enable easy cherry-picking
            if pending_files:
                add_pending_files(*pending_files) # expects var args use * to expand
                commit_feature_flags(release_version)
                version_head_hash = get_head_hash()
            print(' Committed release version [%s]' % release_version)
            print(''.join(['-' for _ in range(80)]))
            print('Building Release candidate')
            input('Press Enter to continue...')
            if not dry_run:
                print(' Running maven builds now and publish to Sonatype and S3 - run-tests [%s]' % run_tests)
            else:
                print(' Running maven builds now run-tests [%s]' % run_tests)
            build_release(release_version, run_tests=run_tests, dry_run=dry_run, cpus=cpus, bwc_version=find_bwc_version(release_version, bwc_path))
            artifacts = get_artifacts(release_version)
            # validate the freshly built artifacts before tagging anything
            smoke_test_release(release_version, artifacts, get_head_hash(), PLUGINS)
            print(''.join(['-' for _ in range(80)]))
            print('Finish Release -- dry_run: %s' % dry_run)
            input('Press Enter to continue...')
            print(' merge release branch, tag and push to %s %s -- dry_run: %s' % (remote, src_branch, dry_run))
            merge_tag_push(remote, src_branch, release_version, dry_run)
            print(' Updating package repositories -- dry_run: %s' % dry_run)
            # NOTE(review): passes the branch name as the 'version' argument
            publish_repositories(src_branch, dry_run=dry_run)
            cherry_pick_command = '.'
            if version_head_hash:
                cherry_pick_command = ' and cherry-pick the documentation changes: \'git cherry-pick %s\' to the development branch' % (version_head_hash)
            pending_msg = """
Release successful pending steps:
    * create a new vX.Y.Z label on github for the next release, with label color #dddddd (https://github.com/elastic/elasticsearch/labels)
    * publish the maven artifacts on Sonatype: https://oss.sonatype.org/index.html
       - here is a guide: http://central.sonatype.org/pages/releasing-the-deployment.html
    * check if the release is there https://oss.sonatype.org/content/repositories/releases/org/elasticsearch/elasticsearch/%(version)s
    * announce the release on the website / blog post
    * tweet about the release
    * announce the release in the google group/mailinglist
    * Move to a Snapshot version to the current branch for the next point release%(cherry_pick)s
"""
            print(pending_msg % { 'version' : release_version, 'cherry_pick' : cherry_pick_command} )
            success = True
        finally:
            # roll back local state on failure; a dry run also undoes the tag/commit
            if not success:
                run('git reset --hard HEAD')
                run('git checkout %s' % src_branch)
            elif dry_run:
                run('git reset --hard %s' % head_hash)
                run('git tag -d v%s' % release_version)
            # we delete this one anyways
            run('git branch -D %s' % (release_branch(release_version)))
    else:
        print("Skipping build - smoketest only against version %s" % smoke_test_version)
        run_mvn('clean') # clean the env!

    if smoke_test_version:
        fetch(remote)
        download_and_verify(smoke_test_version, artifact_names(smoke_test_version), plugins=PLUGINS)
|
273
dev-tools/prepare_release_candidate.py
Normal file
273
dev-tools/prepare_release_candidate.py
Normal file
@ -0,0 +1,273 @@
|
||||
# Licensed to Elasticsearch under one or more contributor
|
||||
# license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright
|
||||
# ownership. Elasticsearch licenses this file to you under
|
||||
# the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on
|
||||
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
# either express or implied. See the License for the specific
|
||||
# language governing permissions and limitations under the License.
|
||||
|
||||
# Prepare a release
|
||||
#
|
||||
# 1. Update the Version.java to remove the snapshot bit
|
||||
# 2. Remove the -SNAPSHOT suffix in all pom.xml files
|
||||
#
|
||||
# USAGE:
|
||||
#
|
||||
# python3 ./dev-tools/prepare_release_candidate.py
|
||||
#
|
||||
# Note: Ensure the script is run from the root directory
|
||||
#
|
||||
|
||||
import fnmatch
|
||||
import argparse
|
||||
from prepare_release_update_documentation import update_reference_docs
|
||||
import subprocess
|
||||
import tempfile
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Path to the core module's Version.java whose snapshot flag is flipped during a release.
VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
# Top-level maven pom the release version is read from.
POM_FILE = 'pom.xml'
# Announcement mail body; interpolated with %(version)s, %(hash)s and
# %(major_minor_version)s once a release candidate has been staged.
MAIL_TEMPLATE = """
Hi all

The new release candidate for %(version)s based on this commit[1] is now available, including the x-plugins, and RPM/deb repos:

 - ZIP [2]
 - tar.gz [3]
 - RPM [4]
 - deb [5]

Plugins can be installed as follows,

    bin/plugin -Des.plugins.staging=true install cloud-aws

The same goes for the x-plugins:

    bin/plugin -Des.plugins.staging=true install license
    bin/plugin -Des.plugins.staging=true install shield
    bin/plugin -Des.plugins.staging=true install watcher

To install the deb from an APT repo:

APT line sources.list line:

deb http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/repos/elasticsearch/%(major_minor_version)s/debian/ stable main

To install the RPM, create a YUM file like:

    /etc/yum.repos.d/elasticsearch.repo

containing:

[elasticsearch-2.0]
name=Elasticsearch repository for packages
baseurl=http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/repos/elasticsearch/%(major_minor_version)s/centos
gpgcheck=1
gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch
enabled=1

[1] https://github.com/elastic/elasticsearch/commit/%(hash)s
[2] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/zip/elasticsearch/%(version)s/elasticsearch-%(version)s.zip
[3] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/tar/elasticsearch/%(version)s/elasticsearch-%(version)s.tar.gz
[4] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/rpm/elasticsearch/%(version)s/elasticsearch-%(version)s.rpm
[5] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/deb/elasticsearch/%(version)s/elasticsearch-%(version)s.deb
"""
|
||||
|
||||
def run(command, env_vars=None):
    """Execute command in a shell, optionally exporting env_vars first.

    Raises RuntimeError when the command exits with a non-zero status.
    """
    if env_vars:
        # fixed: was env_vars.iter_items(), which does not exist on Python 3 dicts
        # and raised AttributeError whenever env_vars was supplied
        for key, value in env_vars.items():
            os.putenv(key, value)
    if os.system('%s' % (command)):
        raise RuntimeError(' FAILED: %s' % (command))
|
||||
|
||||
def ensure_checkout_is_clean():
    """Fail when the working copy has local mods, untracked files, or differs from origin."""
    # Make sure no local mods:
    status = subprocess.check_output('git diff --shortstat', shell=True).decode('utf-8')
    if status:
        raise RuntimeError('git diff --shortstat is non-empty got:\n%s' % status)

    # Make sure no untracked files:
    status = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
    if 'Untracked files:' in status:
        if 'dev-tools/__pycache__/' in status:
            print('*** NOTE: invoke python with -B to prevent __pycache__ directories ***')
        raise RuntimeError('git status shows untracked files got:\n%s' % status)

    # Make sure we have all changes from origin:
    if 'is behind' in status:
        raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch got:\n%s' % (status))

    # Make sure we no local unpushed changes (this is supposed to be a clean area):
    if 'is ahead' in status:
        raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout ", "git reset --hard origin/" in this branch got:\n%s' % (status))
|
||||
|
||||
# Reads the given file and applies the
# callback to it. If the callback changed
# a line the given file is replaced with
# the modified input.
def process_file(file_path, line_callback):
    """Apply line_callback to every line of file_path, rewriting the file on change.

    Returns True when the file was modified, False otherwise.
    """
    fh, abs_path = tempfile.mkstemp()
    modified = False
    with open(abs_path, 'w', encoding='utf-8') as new_file:
        with open(file_path, encoding='utf-8') as old_file:
            for line in old_file:
                new_line = line_callback(line)
                if new_line != line:
                    modified = True
                new_file.write(new_line)
    os.close(fh)
    if not modified:
        # nothing to do - just remove the tmp file
        os.remove(abs_path)
        return False
    # replace the original file with the rewritten copy
    os.remove(file_path)
    shutil.move(abs_path, file_path)
    return True
|
||||
|
||||
# Moves the Version.java file from a snapshot to a release
def remove_version_snapshot(version_file, release):
    """Flip the release's Version constant from snapshot (true) to release (false)."""
    # 1.0.0.Beta1 -> 1_0_0_Beta1
    release = release.replace('.', '_').replace('-', '_')
    pattern = 'new Version(V_%s_ID, true' % (release)
    replacement = 'new Version(V_%s_ID, false' % (release)
    processed = process_file(version_file, lambda line: line.replace(pattern, replacement))
    if not processed:
        raise RuntimeError('failed to remove snapshot version for %s' % (release))
|
||||
|
||||
def rename_local_meta_files(path):
    """Strip the '-local' suffix from maven-metadata-local.xml* files under path."""
    for root, _, file_names in os.walk(path):
        for file_name in fnmatch.filter(file_names, 'maven-metadata-local.xml*'):
            source = os.path.join(root, file_name)
            target = os.path.join(root, file_name.replace('-local', ''))
            os.rename(source, target)
|
||||
|
||||
# Checks the pom.xml for the release version.
# This method fails if the pom file has no SNAPSHOT version set ie.
# if the version is already on a release version we fail.
# Returns the next version string ie. 0.90.7
def find_release_version():
    """Extract the X.Y.Z part of the first <version>X.Y.Z-SNAPSHOT</version> in ./pom.xml."""
    snapshot_re = re.compile(r'<version>(.+)-SNAPSHOT</version>')
    with open('pom.xml', encoding='utf-8') as pom:
        for pom_line in pom:
            match = snapshot_re.search(pom_line)
            if match:
                return match.group(1)
    raise RuntimeError('Could not find release version in branch')
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: prepares a release, optionally deploying to a staging
    # repo (--deploy) and pushing artifacts/repos to S3 (--push-s3).
    parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release')
    parser.add_argument('--deploy', '-d', dest='deploy', action='store_true',
                        help='Installs and Deploys the release on a sonartype staging repository.')
    # FIX: this was action='store_false' combined with a default of False,
    # which made --skipDocCheck a no-op (the doc check could never be
    # skipped). store_true makes the flag behave as its help text promises.
    parser.add_argument('--skipDocCheck', '-c', dest='skip_doc_check', action='store_true',
                        help='Skips any checks for pending documentation changes')
    parser.add_argument('--push-s3', '-p', dest='push', action='store_true',
                        help='Pushes artifacts to the S3 staging area')
    parser.add_argument('--install_only', '-i', dest='install_only', action='store_true',
                        help='Only runs a maven install to skip the remove deployment step')
    parser.add_argument('--gpg-key', '-k', dest='gpg_key', default="D88E42B4",
                        help='Allows you to specify a different gpg_key to be used instead of the default release key')
    parser.set_defaults(deploy=False)
    parser.set_defaults(skip_doc_check=False)
    parser.set_defaults(push=False)
    parser.set_defaults(install_only=False)
    args = parser.parse_args()
    install_and_deploy = args.deploy
    skip_doc_check = args.skip_doc_check
    push = args.push
    gpg_key = args.gpg_key
    install_only = args.install_only

    ensure_checkout_is_clean()
    release_version = find_release_version()
    # Validate the version format and extract the major.minor prefix once
    # (the original evaluated the same re.match twice).
    version_match = re.match('(\d+\.\d+)\.*', release_version)
    if not version_match:
        raise RuntimeError('illegal release version format: %s' % (release_version))
    major_minor_version = version_match.group(1)

    print('*** Preparing release version: [%s]' % release_version)

    if not skip_doc_check:
        print('*** Check for pending documentation changes')
        # update_reference_docs reports docs that still reference this version
        # as upcoming; a release must not ship with those pending.
        pending_files = update_reference_docs(release_version)
        if pending_files:
            raise RuntimeError('pending coming[%s] documentation changes found in %s' % (release_version, pending_files))

    # Strip -SNAPSHOT from all poms (dev-tools and rest-api-spec are
    # separate maven builds with their own poms).
    run('cd dev-tools && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
    run('cd rest-api-spec && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
    run('mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))

    remove_version_snapshot(VERSION_FILE, release_version)

    print('*** Done removing snapshot version. DO NOT COMMIT THIS, WHEN CREATING A RELEASE CANDIDATE.')

    # Stage artifacts into a throw-away local maven repo named after
    # version + current commit hash.
    shortHash = subprocess.check_output('git log --pretty=format:"%h" -n 1', shell=True).decode('utf-8')
    localRepo = '/tmp/elasticsearch-%s-%s' % (release_version, shortHash)
    localRepoElasticsearch = localRepo + '/org/elasticsearch'
    if os.path.exists(localRepoElasticsearch):
        print('clean local repository %s' % localRepoElasticsearch)
        shutil.rmtree(localRepoElasticsearch)

    if install_only:
        mvn_targets = 'install'
    else:
        mvn_targets = 'install deploy'
    install_command = 'mvn clean %s -Prelease -Dskip.integ.tests=true -Dgpg.keyname="%s" -Dpackaging.rpm.rpmbuild=/usr/bin/rpmbuild -Drpm.sign=true -Dmaven.repo.local=%s -Dno.commit.pattern="\\bno(n|)commit\\b" -Dforbidden.test.signatures=""' % (mvn_targets, gpg_key, localRepo)
    clean_repo_command = 'find %s -name _remote.repositories -exec rm {} \;' % (localRepoElasticsearch)
    rename_metadata_files_command = 'for i in $(find %s -name "maven-metadata-local.xml*") ; do mv "$i" "${i/-local/}" ; done' % (localRepoElasticsearch)
    s3_sync_command = 's3cmd sync %s s3://download.elasticsearch.org/elasticsearch/staging/%s-%s/org/' % (localRepoElasticsearch, release_version, shortHash)
    s3_bucket_sync_to = 'download.elasticsearch.org/elasticsearch/staging/%s-%s/repos' % (release_version, shortHash)
    build_repo_command = 'dev-tools/build_repositories.sh %s' % (major_minor_version)
    if install_and_deploy:
        for cmd in [install_command, clean_repo_command]:
            run(cmd)
        rename_local_meta_files(localRepoElasticsearch)
    else:
        # Dry run: print the commands instead of executing them.
        print('')
        print('*** To create a release candidate run: ')
        print(' %s' % (install_command))
        print(' 1. Remove all _remote.repositories: %s' % (clean_repo_command))
        print(' 2. Rename all maven metadata files: %s' % (rename_metadata_files_command))
    if push:
        run(s3_sync_command)
        env_vars = {'S3_BUCKET_SYNC_TO': s3_bucket_sync_to}
        # NOTE(review): two-argument run() differs from the one-argument run()
        # in the older copy of this script; presumably the current run()
        # accepts an env dict - confirm against its definition.
        run(build_repo_command, env_vars)
    else:
        print('')
        print('*** To push a release candidate to s3 run: ')
        print(' 1. Sync %s into S3 bucket' % (localRepoElasticsearch))
        print (' %s' % (s3_sync_command))
        print(' 2. Create repositories: ')
        print (' export S3_BUCKET_SYNC_TO="%s"' % (s3_bucket_sync_to))
        print(' %s' % (build_repo_command))
    print('')
    print('NOTE: the above mvn command will promt you several times for the GPG passphrase of the key you specified you can alternatively pass it via -Dgpg.passphrase=yourPassPhrase')
    print(' since RPM signing doesn\'t support gpg-agents the recommended way to set the password is to add a release profile to your settings.xml:')
    print("""
    <profiles>
      <profile>
        <id>release</id>
        <properties>
          <gpg.passphrase>YourPasswordGoesHere</gpg.passphrase>
        </properties>
      </profile>
    </profiles>
    """)
    print('NOTE: Running s3cmd might require you to create a config file with your credentials, if the s3cmd does not support suppliying them via the command line!')
    print('*** Once the release is deployed and published send out the following mail to dev@elastic.co:')
    print(MAIL_TEMPLATE % ({'version' : release_version, 'hash': shortHash, 'major_minor_version' : major_minor_version}))
|
||||
|
@ -1,144 +0,0 @@
|
||||
# Licensed to Elasticsearch under one or more contributor
|
||||
# license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright
|
||||
# ownership. Elasticsearch licenses this file to you under
|
||||
# the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on
|
||||
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
# either express or implied. See the License for the specific
|
||||
# language governing permissions and limitations under the License.
|
||||
|
||||
# Prepare a release
|
||||
#
|
||||
# 1. Update the Version.java to remove the snapshot bit
|
||||
# 2. Remove the -SNAPSHOT suffix in all pom.xml files
|
||||
#
|
||||
# USAGE:
|
||||
#
|
||||
# python3 ./dev-tools/prepare-release.py
|
||||
#
|
||||
# Note: Ensure the script is run from the root directory
|
||||
#
|
||||
|
||||
import fnmatch
|
||||
import subprocess
|
||||
import tempfile
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Path of the Version.java source that carries the per-version snapshot flag.
VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
# Top-level Maven pom; the current -SNAPSHOT version is read from it.
POM_FILE = 'pom.xml'
|
||||
|
||||
def run(command):
    """Execute `command` through the shell; raise RuntimeError on a non-zero exit."""
    exit_status = os.system('%s' % (command))
    if exit_status:
        raise RuntimeError(' FAILED: %s' % (command))
|
||||
|
||||
def ensure_checkout_is_clean():
    """Fail unless the git checkout is clean and in sync with its upstream.

    Raises RuntimeError if there are local modifications, untracked files,
    unpulled upstream changes, or unpushed local commits. Relies on parsing
    the human-readable output of `git status`, so it assumes an English
    locale for git's messages.
    """
    # Make sure no local mods:
    s = subprocess.check_output('git diff --shortstat', shell=True)
    if len(s) > 0:
        raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)

    # Make sure no untracked files:
    # s is reused by the two checks below as well.
    s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
    if 'Untracked files:' in s:
        raise RuntimeError('git status shows untracked files: got:\n%s' % s)

    # Make sure we have all changes from origin:
    if 'is behind' in s:
        raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch: got:\n%s' % (s))

    # Make sure we no local unpushed changes (this is supposed to be a clean area):
    if 'is ahead' in s:
        raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout ", "git reset --hard origin/" in this branch: got:\n%s' % (s))
|
||||
|
||||
# Reads the given file and applies the
# callback to it. If the callback changed
# a line the given file is replaced with
# the modified input.
def process_file(file_path, line_callback):
    """Apply line_callback to each line of file_path.

    Returns True if any line changed (the file is replaced with the rewritten
    copy), False otherwise (the temp file is discarded).
    """
    # NOTE(review): the fd from mkstemp stays open while the same path is
    # re-opened by name below; fh is only closed after the writes, and it
    # leaks (along with the temp file) if an exception is raised mid-loop.
    fh, abs_path = tempfile.mkstemp()
    modified = False
    with open(abs_path,'w', encoding='utf-8') as new_file:
        with open(file_path, encoding='utf-8') as old_file:
            for line in old_file:
                new_line = line_callback(line)
                # Stays True once any line differs from its original.
                modified = modified or (new_line != line)
                new_file.write(new_line)
    os.close(fh)
    if modified:
        #Remove original file
        os.remove(file_path)
        #Move new file
        shutil.move(abs_path, file_path)
        return True
    else:
        # nothing to do - just remove the tmp file
        os.remove(abs_path)
        return False
|
||||
|
||||
# Moves the Version.java file from a snapshot to a release
def remove_version_snapshot(version_file, release):
    """Flip the snapshot flag (true -> false) for `release` in version_file."""
    # 1.0.0.Beta1 -> 1_0_0_Beta1
    release = release.replace('.', '_')
    release = release.replace('-', '_')
    # The boolean in the Version constructor is the snapshot flag.
    pattern = 'new Version(V_%s_ID, true' % (release)
    replacement = 'new Version(V_%s_ID, false' % (release)
    def callback(line):
        return line.replace(pattern, replacement)
    processed = process_file(version_file, callback)
    if not processed:
        # Nothing was rewritten: the version is already a release or missing.
        raise RuntimeError('failed to remove snapshot version for %s' % (release))
|
||||
|
||||
# Checks the pom.xml for the release version.
# This method fails if the pom file has no SNAPSHOT version set ie.
# if the version is already on a release version we fail.
# Returns the next version string ie. 0.90.7
def find_release_version():
    # Reads pom.xml from the current working directory, so the script must
    # be run from the repository root.
    with open('pom.xml', encoding='utf-8') as file:
        for line in file:
            # First <version>X-SNAPSHOT</version> line wins; X is returned.
            match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
            if match:
                return match.group(1)
    raise RuntimeError('Could not find release version in branch')
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: strips the snapshot markers from the build and prints the
    # manual steps needed to create and stage a release candidate.
    release_version = find_release_version()

    print('*** Preparing release version: [%s]' % release_version)

    ensure_checkout_is_clean()

    # Strip -SNAPSHOT from all poms (dev-tools and rest-api-spec are
    # separate maven builds with their own poms).
    run('cd dev-tools && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
    run('cd rest-api-spec && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
    run('mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))

    remove_version_snapshot(VERSION_FILE, release_version)

    print('*** Done removing snapshot version. DO NOT COMMIT THIS, WHEN CREATING A RELEASE CANDIDATE.')

    # Local staging repo named after version + current commit hash.
    shortHash = subprocess.check_output('git log --pretty=format:"%h" -n 1', shell=True).decode('utf-8')
    localRepo = '/tmp/elasticsearch-%s-%s' % (release_version, shortHash)
    localRepoElasticsearch = localRepo + '/org/elasticsearch'
    # The remaining steps are printed for the operator to run by hand.
    print('')
    print('*** To create a release candidate run: ')
    print(' mvn clean install deploy -Prelease -DskipTests -Dgpg.keyname="D88E42B4" -Dpackaging.rpm.rpmbuild=/usr/bin/rpmbuild -Drpm.sign=true -Dmaven.repo.local=%s -Dno.commit.pattern="\\bno(n|)commit\\b" -Dforbidden.test.signatures=""' % (localRepo))
    print(' 1. Remove all _remote.repositories: find %s -name _remote.repositories -exec rm {} \;' % (localRepoElasticsearch))
    print(' 2. Rename all maven metadata files: for i in $(find %s -name "maven-metadata-local.xml*") ; do mv "$i" "${i/-local/}" ; done' % (localRepoElasticsearch))
    print(' 3. Sync %s into S3 bucket' % (localRepoElasticsearch))
    print (' s3cmd sync %s s3://download.elasticsearch.org/elasticsearch/staging/elasticsearch-%s-%s/maven/org/' % (localRepoElasticsearch, release_version, shortHash))
    print(' 4. Create repositories: ')
    print (' export S3_BUCKET_SYNC_TO="download.elasticsearch.org/elasticsearch/staging/elasticsearch-%s-%s/repos"' % (release_version, shortHash))
    print (' export S3_BUCKET_SYNC_FROM="$S3_BUCKET_SYNC_TO"')
    print(' dev-tools/build_repositories.sh %s' % (release_version))
    print('')
    print('NOTE: the above mvn command will promt you several times for the GPG passphrase of the key you specified you can alternatively pass it via -Dgpg.passphrase=yourPassPhrase')
    print('NOTE: Running s3cmd might require you to create a config file with your credentials, if the s3cmd does not support suppliying them via the command line!')
|
@ -58,6 +58,9 @@ jvm=${elasticsearch.plugin.jvm}
|
||||
classname=${elasticsearch.plugin.classname}
|
||||
#
|
||||
# 'java.version' version of java the code is built against
|
||||
# use the system property java.specification.version
|
||||
# version string must be a sequence of nonnegative decimal integers
|
||||
# separated by "."'s and may have leading zeros
|
||||
java.version=${maven.compiler.target}
|
||||
#
|
||||
# 'elasticsearch.version' version of elasticsearch compiled against
|
||||
|
Loading…
x
Reference in New Issue
Block a user