Merge branch 'master' into feature/rank-eval

Isabel Drost-Fromm 2016-12-15 10:29:26 +01:00
commit 5618d6ca49
69 changed files with 1998 additions and 998 deletions

View File

@@ -474,10 +474,10 @@ gradle run --debug-jvm
 == Building with extra plugins
 Additional plugins may be built alongside elasticsearch, where their
 dependency on elasticsearch will be substituted with the local elasticsearch
-build. To add your plugin, create a directory called x-plugins as a sibling
-of elasticsearch. Checkout your plugin underneath x-plugins and the build
-will automatically pick it up. You can verify the plugin is included as part
-of the build by checking the projects of the build.
+build. To add your plugin, create a directory called elasticsearch-extra as
+a sibling of elasticsearch. Checkout your plugin underneath elasticsearch-extra
+and the build will automatically pick it up. You can verify the plugin is
+included as part of the build by checking the projects of the build.
 ---------------------------------------------------------------------------
 gradle projects

View File

@@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.rest.RestStatus;

@@ -43,6 +44,8 @@ import java.util.stream.Collectors;
 import static java.util.Collections.unmodifiableMap;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;

 /**
  * A base class for all elasticsearch exceptions.

@@ -71,6 +74,14 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     private static final String RESOURCE_HEADER_TYPE_KEY = "es.resource.type";
     private static final String RESOURCE_HEADER_ID_KEY = "es.resource.id";
+    private static final String TYPE = "type";
+    private static final String REASON = "reason";
+    private static final String CAUSED_BY = "caused_by";
+    private static final String STACK_TRACE = "stack_trace";
+    private static final String HEADER = "header";
+    private static final String ERROR = "error";
+    private static final String ROOT_CAUSE = "root_cause";

     private static final Map<Integer, FunctionThatThrowsIOException<StreamInput, ? extends ElasticsearchException>> ID_TO_SUPPLIER;
     private static final Map<Class<? extends ElasticsearchException>, ElasticsearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE;
     private final Map<String, List<String>> headers = new HashMap<>();

@@ -247,8 +258,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
         if (ex != this) {
             toXContent(builder, params, this);
         } else {
-            builder.field("type", getExceptionName());
-            builder.field("reason", getMessage());
+            builder.field(TYPE, getExceptionName());
+            builder.field(REASON, getMessage());
             for (String key : headers.keySet()) {
                 if (key.startsWith("es.")) {
                     List<String> values = headers.get(key);

@@ -258,7 +269,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
             innerToXContent(builder, params);
             renderHeader(builder, params);
             if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) {
-                builder.field("stack_trace", ExceptionsHelper.stackTrace(this));
+                builder.field(STACK_TRACE, ExceptionsHelper.stackTrace(this));
             }
         }
         return builder;

@@ -277,7 +288,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     protected void causeToXContent(XContentBuilder builder, Params params) throws IOException {
         final Throwable cause = getCause();
         if (cause != null && params.paramAsBoolean(REST_EXCEPTION_SKIP_CAUSE, REST_EXCEPTION_SKIP_CAUSE_DEFAULT) == false) {
-            builder.field("caused_by");
+            builder.field(CAUSED_BY);
             builder.startObject();
             toXContent(builder, params, cause);
             builder.endObject();

@@ -291,7 +302,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
                 continue;
             }
             if (hasHeader == false) {
-                builder.startObject("header");
+                builder.startObject(HEADER);
                 hasHeader = true;
             }
             List<String> values = headers.get(key);

@@ -324,20 +335,74 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
         if (ex instanceof ElasticsearchException) {
             ((ElasticsearchException) ex).toXContent(builder, params);
         } else {
-            builder.field("type", getExceptionName(ex));
-            builder.field("reason", ex.getMessage());
+            builder.field(TYPE, getExceptionName(ex));
+            builder.field(REASON, ex.getMessage());
             if (ex.getCause() != null) {
-                builder.field("caused_by");
+                builder.field(CAUSED_BY);
                 builder.startObject();
                 toXContent(builder, params, ex.getCause());
                 builder.endObject();
             }
             if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) {
-                builder.field("stack_trace", ExceptionsHelper.stackTrace(ex));
+                builder.field(STACK_TRACE, ExceptionsHelper.stackTrace(ex));
             }
         }
     }

+    /**
+     * Generate a {@link ElasticsearchException} from a {@link XContentParser}. This does not
+     * return the original exception type (ie NodeClosedException for example) but just wraps
+     * the type, the reason and the cause of the exception. It also recursively parses the
+     * tree structure of the cause, returning it as a tree structure of {@link ElasticsearchException}
+     * instances.
+     */
+    public static ElasticsearchException fromXContent(XContentParser parser) throws IOException {
+        XContentParser.Token token = ensureFieldName(parser.nextToken(), parser::getTokenLocation);
+
+        String type = null, reason = null, stack = null;
+        ElasticsearchException cause = null;
+        Map<String, Object> headers = new HashMap<>();
+
+        do {
+            String currentFieldName = parser.currentName();
+            token = parser.nextToken();
+            if (token.isValue()) {
+                if (TYPE.equals(currentFieldName)) {
+                    type = parser.text();
+                } else if (REASON.equals(currentFieldName)) {
+                    reason = parser.text();
+                } else if (STACK_TRACE.equals(currentFieldName)) {
+                    stack = parser.text();
+                } else {
+                    // Everything else is considered as a header
+                    headers.put(currentFieldName, parser.text());
+                }
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                if (CAUSED_BY.equals(currentFieldName)) {
+                    cause = fromXContent(parser);
+                } else if (HEADER.equals(currentFieldName)) {
+                    headers.putAll(parser.map());
+                } else {
+                    throwUnknownField(currentFieldName, parser.getTokenLocation());
+                }
+            }
+        } while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME);
+
+        StringBuilder message = new StringBuilder("Elasticsearch exception [");
+        message.append(TYPE).append('=').append(type).append(", ");
+        message.append(REASON).append('=').append(reason);
+        if (stack != null) {
+            message.append(", ").append(STACK_TRACE).append('=').append(stack);
+        }
+        message.append(']');
+
+        ElasticsearchException e = new ElasticsearchException(message.toString(), cause);
+        for (Map.Entry<String, Object> header : headers.entrySet()) {
+            e.addHeader(header.getKey(), String.valueOf(header.getValue()));
+        }
+        return e;
+    }
+
     /**
      * Returns the root cause of this exception or multiple if different shards caused different exceptions
      */

@@ -809,9 +874,9 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     }

     public static void renderException(XContentBuilder builder, Params params, Exception e) throws IOException {
-        builder.startObject("error");
+        builder.startObject(ERROR);
         final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(e);
-        builder.field("root_cause");
+        builder.field(ROOT_CAUSE);
         builder.startArray();
         for (ElasticsearchException rootCause : rootCauses) {
             builder.startObject();
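The new fromXContent entry point expects the parser to be positioned just before the first field of the error object. A hypothetical usage sketch, not part of this commit — the JSON string and assertions are made up for illustration:

    String json = "{\"type\":\"exception\",\"reason\":\"foo\","
            + "\"caused_by\":{\"type\":\"illegal_state_exception\",\"reason\":\"bar\"}}";
    try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(json)) {
        parser.nextToken(); // advance to START_OBJECT so the next token is the "type" field name
        ElasticsearchException e = ElasticsearchException.fromXContent(parser);
        // the original type is not reconstructed, only folded into the message...
        assert e.getMessage().equals("Elasticsearch exception [type=exception, reason=foo]");
        // ...while the caused_by chain is rebuilt as nested ElasticsearchException instances
        assert e.getCause() instanceof ElasticsearchException;
    }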

View File

@@ -22,22 +22,20 @@ package org.elasticsearch.action.admin.cluster.repositories.put;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;

 import java.io.IOException;
 import java.util.Map;

 import static org.elasticsearch.action.ValidateActions.addValidationError;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;

 /**
  * Register repository request.

@@ -198,18 +196,8 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
      *
      * @param repositoryDefinition repository definition
      */
-    public PutRepositoryRequest source(XContentBuilder repositoryDefinition) {
-        return source(repositoryDefinition.bytes());
-    }
-
-    /**
-     * Parses repository definition.
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(Map repositoryDefinition) {
-        Map<String, Object> source = repositoryDefinition;
-        for (Map.Entry<String, Object> entry : source.entrySet()) {
+    public PutRepositoryRequest source(Map<String, Object> repositoryDefinition) {
+        for (Map.Entry<String, Object> entry : repositoryDefinition.entrySet()) {
             String name = entry.getKey();
             if (name.equals("type")) {
                 type(entry.getValue().toString());

@@ -217,64 +205,14 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
                 if (!(entry.getValue() instanceof Map)) {
                     throw new IllegalArgumentException("Malformed settings section, should include an inner object");
                 }
-                settings((Map<String, Object>) entry.getValue());
+                @SuppressWarnings("unchecked")
+                Map<String, Object> sub = (Map<String, Object>) entry.getValue();
+                settings(sub);
             }
         }
         return this;
     }

-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(String repositoryDefinition) {
-        try (XContentParser parser = XContentFactory.xContent(repositoryDefinition).createParser(repositoryDefinition)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse repository source [" + repositoryDefinition + "]", e);
-        }
-    }
-
-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(byte[] repositoryDefinition) {
-        return source(repositoryDefinition, 0, repositoryDefinition.length);
-    }
-
-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(byte[] repositoryDefinition, int offset, int length) {
-        try (XContentParser parser = XContentFactory.xContent(repositoryDefinition, offset, length).createParser(repositoryDefinition, offset, length)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse repository source", e);
-        }
-    }
-
-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(BytesReference repositoryDefinition) {
-        try (XContentParser parser = XContentFactory.xContent(repositoryDefinition).createParser(repositoryDefinition)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse template source", e);
-        }
-    }
-
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
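With the String, byte[] and BytesReference overloads removed, callers hand over a Map directly; REST layers now produce it via parser.mapOrdered(), as the RestPutRepositoryAction change further down shows. A minimal sketch of the surviving overload — repository name and settings here are hypothetical:

    Map<String, Object> definition = new HashMap<>();
    definition.put("type", "fs");
    definition.put("settings", Collections.singletonMap("location", "/mount/backups"));
    PutRepositoryRequest request = new PutRepositoryRequest("my_backup");
    request.source(definition);

CreateSnapshotRequest and RestoreSnapshotRequest below receive the same map-only treatment.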

View File

@@ -25,13 +25,11 @@ import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;

 import java.io.IOException;

@@ -41,10 +39,9 @@ import java.util.Map;
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.common.Strings.EMPTY_ARRAY;
-import static org.elasticsearch.common.Strings.hasLength;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;

 /**

@@ -357,17 +354,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
      * @param source snapshot definition
      * @return this request
      */
-    public CreateSnapshotRequest source(XContentBuilder source) {
-        return source(source.bytes());
-    }
-
-    /**
-     * Parses snapshot definition.
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(Map source) {
+    public CreateSnapshotRequest source(Map<String, Object> source) {
         for (Map.Entry<String, Object> entry : ((Map<String, Object>) source).entrySet()) {
             String name = entry.getKey();
             if (name.equals("indices")) {

@@ -393,66 +380,6 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
         return this;
     }

-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(String source) {
-        if (hasLength(source)) {
-            try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-                return source(parser.mapOrdered());
-            } catch (Exception e) {
-                throw new IllegalArgumentException("failed to parse repository source [" + source + "]", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(byte[] source) {
-        return source(source, 0, source.length);
-    }
-
-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @param offset offset
-     * @param length length
-     * @return this request
-     */
-    public CreateSnapshotRequest source(byte[] source, int offset, int length) {
-        if (length > 0) {
-            try (XContentParser parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length)) {
-                return source(parser.mapOrdered());
-            } catch (IOException e) {
-                throw new IllegalArgumentException("failed to parse repository source", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(BytesReference source) {
-        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse snapshot source", e);
-        }
-    }
-
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);

View File

@@ -24,13 +24,11 @@ import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;

 import java.io.IOException;

@@ -39,10 +37,9 @@ import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.action.ValidateActions.addValidationError;
-import static org.elasticsearch.common.Strings.hasLength;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;

 /**

@@ -472,22 +469,8 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
      * @param source restore definition
      * @return this request
      */
-    public RestoreSnapshotRequest source(XContentBuilder source) {
-        try {
-            return source(source.bytes());
-        } catch (Exception e) {
-            throw new IllegalArgumentException("Failed to build json for repository request", e);
-        }
-    }
-
-    /**
-     * Parses restore definition
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(Map source) {
-        for (Map.Entry<String, Object> entry : ((Map<String, Object>) source).entrySet()) {
+    public RestoreSnapshotRequest source(Map<String, Object> source) {
+        for (Map.Entry<String, Object> entry : source.entrySet()) {
             String name = entry.getKey();
             if (name.equals("indices")) {
                 if (entry.getValue() instanceof String) {

@@ -543,74 +526,6 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
         return this;
     }

-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(String source) {
-        if (hasLength(source)) {
-            try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-                return source(parser.mapOrdered());
-            } catch (Exception e) {
-                throw new IllegalArgumentException("failed to parse repository source [" + source + "]", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(byte[] source) {
-        return source(source, 0, source.length);
-    }
-
-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @param offset offset
-     * @param length length
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(byte[] source, int offset, int length) {
-        if (length > 0) {
-            try (XContentParser parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length)) {
-                return source(parser.mapOrdered());
-            } catch (IOException e) {
-                throw new IllegalArgumentException("failed to parse repository source", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(BytesReference source) {
-        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse template source", e);
-        }
-    }
-
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);

View File

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.action.admin.indices.rollover;

-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;

@@ -26,16 +25,11 @@ import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParseFieldMatcherSupplier;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;

 import java.io.IOException;
 import java.util.HashSet;

@@ -50,7 +44,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  */
 public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implements IndicesRequest {

-    public static ObjectParser<RolloverRequest, ParseFieldMatcherSupplier> PARSER =
+    public static final ObjectParser<RolloverRequest, ParseFieldMatcherSupplier> PARSER =
         new ObjectParser<>("conditions", null);
     static {
         PARSER.declareField((parser, request, parseFieldMatcherSupplier) ->

@@ -194,19 +188,6 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
         return createIndexRequest;
     }

-    public void source(BytesReference source) {
-        XContentType xContentType = XContentFactory.xContentType(source);
-        if (xContentType != null) {
-            try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) {
-                PARSER.parse(parser, this, () -> ParseFieldMatcher.EMPTY);
-            } catch (IOException e) {
-                throw new ElasticsearchParseException("failed to parse source for rollover index", e);
-            }
-        } else {
-            throw new ElasticsearchParseException("failed to parse content type for rollover index source");
-        }
-    }
-
     /**
      * Sets the number of shard copies that should be active for creation of the
      * new rollover index to return. Defaults to {@link ActiveShardCount#DEFAULT}, which will

View File

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.action.admin.indices.shrink;

-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;

@@ -26,15 +25,10 @@ import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParseFieldMatcherSupplier;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;

 import java.io.IOException;
 import java.util.Objects;

@@ -46,7 +40,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  */
 public class ShrinkRequest extends AcknowledgedRequest<ShrinkRequest> implements IndicesRequest {

-    public static ObjectParser<ShrinkRequest, ParseFieldMatcherSupplier> PARSER =
+    public static final ObjectParser<ShrinkRequest, ParseFieldMatcherSupplier> PARSER =
         new ObjectParser<>("shrink_request", null);
     static {
         PARSER.declareField((parser, request, parseFieldMatcherSupplier) ->

@@ -152,17 +146,4 @@ public class ShrinkRequest extends AcknowledgedRequest<ShrinkRequest> implements
     public void setWaitForActiveShards(final int waitForActiveShards) {
         setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards));
     }
-
-    public void source(BytesReference source) {
-        XContentType xContentType = XContentFactory.xContentType(source);
-        if (xContentType != null) {
-            try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) {
-                PARSER.parse(parser, this, () -> ParseFieldMatcher.EMPTY);
-            } catch (IOException e) {
-                throw new ElasticsearchParseException("failed to parse source for shrink index", e);
-            }
-        } else {
-            throw new ElasticsearchParseException("failed to parse content type for shrink index source");
-        }
-    }
 }

View File

@@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.CompositeIndicesRequest;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.ActiveShardCount;

@@ -400,8 +399,10 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
                     UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict)
                             .version(version).versionType(versionType)
                             .routing(routing)
-                            .parent(parent)
-                            .fromXContent(data.slice(from, nextMarker - from));
+                            .parent(parent);
+                    try (XContentParser sliceParser = xContent.createParser(data.slice(from, nextMarker - from))) {
+                        updateRequest.fromXContent(sliceParser);
+                    }
                     if (fetchSourceContext != null) {
                         updateRequest.fetchSource(fetchSourceContext);
                     }

View File

@@ -34,6 +34,7 @@ import org.elasticsearch.rest.RestStatus;

 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Objects;

 /**
  * Base class for write action responses.

@@ -120,6 +121,25 @@ public class ReplicationResponse extends ActionResponse {
             return status;
         }

+        @Override
+        public boolean equals(Object that) {
+            if (this == that) {
+                return true;
+            }
+            if (that == null || getClass() != that.getClass()) {
+                return false;
+            }
+            ShardInfo other = (ShardInfo) that;
+            return Objects.equals(total, other.total) &&
+                    Objects.equals(successful, other.successful) &&
+                    Arrays.equals(failures, other.failures);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(total, successful, failures);
+        }
+
         @Override
         public void readFrom(StreamInput in) throws IOException {
             total = in.readVInt();

@@ -251,6 +271,27 @@
             return primary;
         }

+        @Override
+        public boolean equals(Object that) {
+            if (this == that) {
+                return true;
+            }
+            if (that == null || getClass() != that.getClass()) {
+                return false;
+            }
+            Failure failure = (Failure) that;
+            return Objects.equals(primary, failure.primary) &&
+                    Objects.equals(shardId, failure.shardId) &&
+                    Objects.equals(nodeId, failure.nodeId) &&
+                    Objects.equals(cause, failure.cause) &&
+                    Objects.equals(status, failure.status);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(shardId, nodeId, cause, status, primary);
+        }
+
         @Override
         public void readFrom(StreamInput in) throws IOException {
             shardId = ShardId.readShardId(in);

View File

@@ -28,8 +28,6 @@ import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.DeprecationLogger;

@@ -689,18 +687,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return upsertRequest;
     }

-    public UpdateRequest fromXContent(XContentBuilder source) throws Exception {
-        return fromXContent(source.bytes());
-    }
-
-    public UpdateRequest fromXContent(byte[] source) throws Exception {
-        return fromXContent(source, 0, source.length);
-    }
-
-    public UpdateRequest fromXContent(byte[] source, int offset, int length) throws Exception {
-        return fromXContent(new BytesArray(source, offset, length));
-    }
-
     /**
      * Should this update attempt to detect if it is a noop? Defaults to true.
      * @return this for chaining

@@ -717,52 +703,48 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return detectNoop;
     }

-    public UpdateRequest fromXContent(BytesReference source) throws IOException {
-        Script script = null;
-        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-            XContentParser.Token token = parser.nextToken();
-            if (token == null) {
-                return this;
-            }
-            String currentFieldName = null;
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                if (token == XContentParser.Token.FIELD_NAME) {
-                    currentFieldName = parser.currentName();
-                } else if ("script".equals(currentFieldName)) {
-                    script = Script.parse(parser, ParseFieldMatcher.EMPTY);
-                } else if ("scripted_upsert".equals(currentFieldName)) {
-                    scriptedUpsert = parser.booleanValue();
-                } else if ("upsert".equals(currentFieldName)) {
-                    XContentType xContentType = XContentFactory.xContentType(source);
-                    XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
-                    builder.copyCurrentStructure(parser);
-                    safeUpsertRequest().source(builder);
-                } else if ("doc".equals(currentFieldName)) {
-                    XContentType xContentType = XContentFactory.xContentType(source);
-                    XContentBuilder docBuilder = XContentFactory.contentBuilder(xContentType);
-                    docBuilder.copyCurrentStructure(parser);
-                    safeDoc().source(docBuilder);
-                } else if ("doc_as_upsert".equals(currentFieldName)) {
-                    docAsUpsert(parser.booleanValue());
-                } else if ("detect_noop".equals(currentFieldName)) {
-                    detectNoop(parser.booleanValue());
-                } else if ("fields".equals(currentFieldName)) {
-                    List<Object> fields = null;
-                    if (token == XContentParser.Token.START_ARRAY) {
-                        fields = (List) parser.list();
-                    } else if (token.isValue()) {
-                        fields = Collections.singletonList(parser.text());
-                    }
-                    if (fields != null) {
-                        fields(fields.toArray(new String[fields.size()]));
-                    }
-                } else if ("_source".equals(currentFieldName)) {
-                    fetchSourceContext = FetchSourceContext.parse(parser);
-                }
-            }
-            if (script != null) {
-                this.script = script;
-            }
-        }
-        return this;
-    }
+    public UpdateRequest fromXContent(XContentParser parser) throws IOException {
+        Script script = null;
+        XContentParser.Token token = parser.nextToken();
+        if (token == null) {
+            return this;
+        }
+        String currentFieldName = null;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            if (token == XContentParser.Token.FIELD_NAME) {
+                currentFieldName = parser.currentName();
+            } else if ("script".equals(currentFieldName)) {
+                script = Script.parse(parser, ParseFieldMatcher.EMPTY);
+            } else if ("scripted_upsert".equals(currentFieldName)) {
+                scriptedUpsert = parser.booleanValue();
+            } else if ("upsert".equals(currentFieldName)) {
+                XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
+                builder.copyCurrentStructure(parser);
+                safeUpsertRequest().source(builder);
+            } else if ("doc".equals(currentFieldName)) {
+                XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
+                docBuilder.copyCurrentStructure(parser);
+                safeDoc().source(docBuilder);
+            } else if ("doc_as_upsert".equals(currentFieldName)) {
+                docAsUpsert(parser.booleanValue());
+            } else if ("detect_noop".equals(currentFieldName)) {
+                detectNoop(parser.booleanValue());
+            } else if ("fields".equals(currentFieldName)) {
+                List<Object> fields = null;
+                if (token == XContentParser.Token.START_ARRAY) {
+                    fields = (List) parser.list();
+                } else if (token.isValue()) {
+                    fields = Collections.singletonList(parser.text());
+                }
+                if (fields != null) {
+                    fields(fields.toArray(new String[fields.size()]));
+                }
+            } else if ("_source".equals(currentFieldName)) {
+                fetchSourceContext = FetchSourceContext.parse(parser);
+            }
+        }
+        if (script != null) {
+            this.script = script;
+        }
+        return this;
+    }
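fromXContent now takes a caller-owned parser instead of raw bytes, which is why BulkRequest above opens one per document slice and closes it in a try-with-resources block. A minimal usage sketch, assuming a JSON update body — index, type, id and body here are hypothetical:

    UpdateRequest updateRequest = new UpdateRequest("index", "type", "1");
    String body = "{\"doc\":{\"title\":\"updated\"},\"detect_noop\":true}";
    try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(body)) {
        updateRequest.fromXContent(parser);
    }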

View File

@@ -0,0 +1,87 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser.Token;

import java.io.IOException;
import java.util.Locale;
import java.util.function.Supplier;

/**
 * A set of static methods to get {@link Token} from {@link XContentParser}
 * while checking for their types and throw {@link ParsingException} if needed.
 */
public final class XContentParserUtils {

    private XContentParserUtils() {
    }

    /**
     * Makes sure that current token is of type {@link XContentParser.Token#FIELD_NAME}
     *
     * @return the token
     * @throws ParsingException if the token is not of type {@link XContentParser.Token#FIELD_NAME}
     */
    public static Token ensureFieldName(Token token, Supplier<XContentLocation> location) throws IOException {
        return ensureType(Token.FIELD_NAME, token, location);
    }

    /**
     * Makes sure that current token is of type {@link XContentParser.Token#FIELD_NAME} and that the field name is equal to the provided one
     *
     * @return the token
     * @throws ParsingException if the token is not of type {@link XContentParser.Token#FIELD_NAME} or is not equal to the given
     * field name
     */
    public static Token ensureFieldName(XContentParser parser, Token token, String fieldName) throws IOException {
        Token t = ensureType(Token.FIELD_NAME, token, parser::getTokenLocation);

        String current = parser.currentName() != null ? parser.currentName() : "<null>";
        if (current.equals(fieldName) == false) {
            String message = "Failed to parse object: expecting field with name [%s] but found [%s]";
            throw new ParsingException(parser.getTokenLocation(), String.format(Locale.ROOT, message, fieldName, current));
        }
        return t;
    }

    /**
     * @throws ParsingException with an "unknown field found" reason
     */
    public static void throwUnknownField(String field, XContentLocation location) {
        String message = "Failed to parse object: unknown field [%s] found";
        throw new ParsingException(location, String.format(Locale.ROOT, message, field));
    }

    /**
     * Makes sure that current token is of the expected type
     *
     * @return the token
     * @throws ParsingException if the token is not equal to the expected type
     */
    private static Token ensureType(Token expected, Token current, Supplier<XContentLocation> location) {
        if (current != expected) {
            String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
            throw new ParsingException(location.get(), String.format(Locale.ROOT, message, expected, current));
        }
        return current;
    }
}
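A sketch of how these helpers combine when walking an object, mirroring the pattern ElasticsearchException.fromXContent uses above — hypothetical caller, not part of this commit:

    try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser("{\"name\":\"foo\"}")) {
        parser.nextToken(); // START_OBJECT
        XContentParserUtils.ensureFieldName(parser, parser.nextToken(), "name"); // throws ParsingException on a mismatch
        parser.nextToken();
        String name = parser.text();
        // an unexpected field would instead be rejected with:
        // XContentParserUtils.throwUnknownField(parser.currentName(), parser.getTokenLocation());
    }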

View File

@@ -468,18 +468,26 @@ public class NodeJoinController extends AbstractComponent {
     private ClusterState.Builder becomeMasterAndTrimConflictingNodes(ClusterState currentState, List<DiscoveryNode> joiningNodes) {
         assert currentState.nodes().getMasterNodeId() == null : currentState;
-        DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentState.nodes());
+        DiscoveryNodes currentNodes = currentState.nodes();
+        DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentNodes);
         nodesBuilder.masterNodeId(currentState.nodes().getLocalNodeId());
         ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks())
             .removeGlobalBlock(discoverySettings.getNoMasterBlock()).build();
         for (final DiscoveryNode joiningNode : joiningNodes) {
-            final DiscoveryNode existingNode = nodesBuilder.get(joiningNode.getId());
-            if (existingNode != null && existingNode.equals(joiningNode) == false) {
-                logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", existingNode, joiningNode);
-                nodesBuilder.remove(existingNode.getId());
+            final DiscoveryNode nodeWithSameId = nodesBuilder.get(joiningNode.getId());
+            if (nodeWithSameId != null && nodeWithSameId.equals(joiningNode) == false) {
+                logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", nodeWithSameId, joiningNode);
+                nodesBuilder.remove(nodeWithSameId.getId());
+            }
+            final DiscoveryNode nodeWithSameAddress = currentNodes.findByAddress(joiningNode.getAddress());
+            if (nodeWithSameAddress != null && nodeWithSameAddress.equals(joiningNode) == false) {
+                logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", nodeWithSameAddress,
+                    joiningNode);
+                nodesBuilder.remove(nodeWithSameAddress.getId());
             }
         }

         // now trim any left over dead nodes - either left there when the previous master stepped down
         // or removed by us above
         ClusterState tmpState = ClusterState.builder(currentState).nodes(nodesBuilder).blocks(clusterBlocks).build();

View File

@@ -327,6 +327,11 @@ public class DocumentMapper implements ToXContent {
      */
     public DocumentMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
         Mapping updated = this.mapping.updateFieldType(fullNameToFieldType);
+        if (updated == this.mapping) {
+            // no change
+            return this;
+        }
+        assert updated == updated.updateFieldType(fullNameToFieldType) : "updateFieldType operation is not idempotent";
         return new DocumentMapper(mapperService, updated);
     }

View File

@@ -93,7 +93,7 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
         // is the update even legal?
         checkCompatibility(type, fieldMapper, updateAllTypes);

-        if (fieldType != fullNameFieldType) {
+        if (fieldType.equals(fullNameFieldType) == false) {
             fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType());
         }

View File

@@ -104,12 +104,22 @@ public final class Mapping implements ToXContent {
      * Recursively update sub field types.
      */
     public Mapping updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
-        final MetadataFieldMapper[] updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
-        for (int i = 0; i < updatedMeta.length; ++i) {
-            updatedMeta[i] = (MetadataFieldMapper) updatedMeta[i].updateFieldType(fullNameToFieldType);
+        MetadataFieldMapper[] updatedMeta = null;
+        for (int i = 0; i < metadataMappers.length; ++i) {
+            MetadataFieldMapper currentFieldMapper = metadataMappers[i];
+            MetadataFieldMapper updatedFieldMapper = (MetadataFieldMapper) currentFieldMapper.updateFieldType(fullNameToFieldType);
+            if (updatedFieldMapper != currentFieldMapper) {
+                if (updatedMeta == null) {
+                    updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
+                }
+                updatedMeta[i] = updatedFieldMapper;
+            }
         }
         RootObjectMapper updatedRoot = root.updateFieldType(fullNameToFieldType);
-        return new Mapping(indexCreated, updatedRoot, updatedMeta, meta);
+        if (updatedMeta == null && updatedRoot == root) {
+            return this;
+        }
+        return new Mapping(indexCreated, updatedRoot, updatedMeta == null ? metadataMappers : updatedMeta, meta);
     }

     @Override
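Both mapper changes implement the same lazy copy-on-write idiom: nothing is copied until the first element actually changes, and an unchanged result keeps its identity so DocumentMapper.updateFieldType above can short-circuit on ==. The idiom in isolation, as a generic hypothetical sketch (names are illustrative, not from this commit; assumes java.util.Arrays and java.util.function.UnaryOperator are imported):

    static String[] updateAll(String[] items, UnaryOperator<String> update) {
        String[] copy = null;                       // defer the copy until a change is seen
        for (int i = 0; i < items.length; i++) {
            String updated = update.apply(items[i]);
            if (updated != items[i]) {
                if (copy == null) {
                    copy = Arrays.copyOf(items, items.length);
                }
                copy[i] = updated;
            }
        }
        return copy == null ? items : copy;         // identity preserved when nothing changed
    }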

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.rest;

-import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.CheckedConsumer;

@@ -229,6 +228,30 @@ public abstract class RestRequest implements ToXContent.Params {
         return params;
     }

+    /**
+     * A parser for the contents of this request if there is a body, otherwise throws an {@link ElasticsearchParseException}. Use
+     * {@link #applyContentParser(CheckedConsumer)} if you want to gracefully handle when the request doesn't have any contents. Use
+     * {@link #contentOrSourceParamParser()} for requests that support specifying the request body in the {@code source} param.
+     */
+    public final XContentParser contentParser() throws IOException {
+        BytesReference content = content();
+        if (content.length() == 0) {
+            throw new ElasticsearchParseException("Body required");
+        }
+        return XContentFactory.xContent(content).createParser(content);
+    }
+
+    /**
+     * If there is any content then call {@code applyParser} with the parser, otherwise do nothing.
+     */
+    public final void applyContentParser(CheckedConsumer<XContentParser, IOException> applyParser) throws IOException {
+        if (hasContent()) {
+            try (XContentParser parser = contentParser()) {
+                applyParser.accept(parser);
+            }
+        }
+    }
+
     /**
      * Does this request have content or a {@code source} parameter? Use this instead of {@link #hasContent()} if this
      * {@linkplain RestHandler} treats the {@code source} parameter like the body content.

@@ -256,16 +279,13 @@
      * back to the user when there isn't request content.
      */
     public final void withContentOrSourceParamParserOrNull(CheckedConsumer<XContentParser, IOException> withParser) throws IOException {
-        XContentParser parser = null;
         BytesReference content = contentOrSourceParam();
         if (content.length() > 0) {
-            parser = XContentFactory.xContent(content).createParser(content);
-        }
-
-        try {
-            withParser.accept(parser);
-        } finally {
-            IOUtils.close(parser);
+            try (XContentParser parser = XContentFactory.xContent(content).createParser(content)) {
+                withParser.accept(parser);
+            }
+        } else {
+            withParser.accept(null);
         }
     }
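Handlers with an optional body can now collapse the hasContent()/createParser/close dance into one call; the reroute and create-snapshot actions below do exactly this. A hypothetical handler fragment:

    // inside a BaseRestHandler.prepareRequest implementation
    request.applyContentParser(parser -> {
        Map<String, Object> body = parser.map(); // only runs when a body is present
        // ... apply body fields to the request being built ...
    });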

View File

@@ -121,11 +121,7 @@ public class RestClusterRerouteAction extends BaseRestHandler {
         clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout()));
         clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed()));
         clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout()));
-        if (request.hasContent()) {
-            try (XContentParser parser = XContentHelper.createParser(request.content())) {
-                PARSER.parse(parser, clusterRerouteRequest, new ParseContext(registry, parseFieldMatcher));
-            }
-        }
+        request.applyContentParser(parser -> PARSER.parse(parser, clusterRerouteRequest, new ParseContext(registry, parseFieldMatcher)));
         return clusterRerouteRequest;
     }

View File

@@ -26,7 +26,6 @@ import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;

@@ -52,7 +51,7 @@ public class RestClusterUpdateSettingsAction extends BaseRestHandler {
         clusterUpdateSettingsRequest.masterNodeTimeout(
             request.paramAsTime("master_timeout", clusterUpdateSettingsRequest.masterNodeTimeout()));
         Map<String, Object> source;
-        try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) {
+        try (XContentParser parser = request.contentParser()) {
             source = parser.map();
         }
         if (source.containsKey("transient")) {

View File

@ -49,7 +49,7 @@ public class RestCreateSnapshotAction extends BaseRestHandler {
@Override @Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
CreateSnapshotRequest createSnapshotRequest = createSnapshotRequest(request.param("repository"), request.param("snapshot")); CreateSnapshotRequest createSnapshotRequest = createSnapshotRequest(request.param("repository"), request.param("snapshot"));
createSnapshotRequest.source(request.content().utf8ToString()); request.applyContentParser(p -> createSnapshotRequest.source(p.mapOrdered()));
createSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createSnapshotRequest.masterNodeTimeout())); createSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createSnapshotRequest.masterNodeTimeout()));
createSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false)); createSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
return channel -> client.admin().cluster().createSnapshot(createSnapshotRequest, new RestToXContentListener<>(channel)); return channel -> client.admin().cluster().createSnapshot(createSnapshotRequest, new RestToXContentListener<>(channel));
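Note the switch from the raw utf8 body to p.mapOrdered(): in this codebase mapOrdered() is backed by a LinkedHashMap, so the snapshot source keeps the field order of the request body when re-serialized. A small sketch (imports as in the previous sketch; the JSON body is illustrative):

class SnapshotSourceSketch {
    static Map<String, Object> orderedSource() throws IOException {
        try (XContentParser parser = XContentType.JSON.xContent()
                .createParser("{\"indices\": \"logs-*\", \"ignore_unavailable\": true}")) {
            // iteration order is "indices" then "ignore_unavailable", matching the document
            return parser.mapOrdered();
        }
    }
}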

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
@ -50,7 +51,9 @@ public class RestPutRepositoryAction extends BaseRestHandler {
@Override @Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
PutRepositoryRequest putRepositoryRequest = putRepositoryRequest(request.param("repository")); PutRepositoryRequest putRepositoryRequest = putRepositoryRequest(request.param("repository"));
putRepositoryRequest.source(request.content().utf8ToString()); try (XContentParser parser = request.contentParser()) {
putRepositoryRequest.source(parser.mapOrdered());
}
putRepositoryRequest.verify(request.paramAsBoolean("verify", true)); putRepositoryRequest.verify(request.paramAsBoolean("verify", true));
putRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRepositoryRequest.masterNodeTimeout())); putRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRepositoryRequest.masterNodeTimeout()));
putRepositoryRequest.timeout(request.paramAsTime("timeout", putRepositoryRequest.timeout())); putRepositoryRequest.timeout(request.paramAsTime("timeout", putRepositoryRequest.timeout()));

View File

@ -49,7 +49,7 @@ public class RestRestoreSnapshotAction extends BaseRestHandler {
RestoreSnapshotRequest restoreSnapshotRequest = restoreSnapshotRequest(request.param("repository"), request.param("snapshot")); RestoreSnapshotRequest restoreSnapshotRequest = restoreSnapshotRequest(request.param("repository"), request.param("snapshot"));
restoreSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", restoreSnapshotRequest.masterNodeTimeout())); restoreSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", restoreSnapshotRequest.masterNodeTimeout()));
restoreSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false)); restoreSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
restoreSnapshotRequest.source(request.content().utf8ToString()); request.applyContentParser(p -> restoreSnapshotRequest.source(p.mapOrdered()));
return channel -> client.admin().cluster().restoreSnapshot(restoreSnapshotRequest, new RestToXContentListener<>(channel)); return channel -> client.admin().cluster().restoreSnapshot(restoreSnapshotRequest, new RestToXContentListener<>(channel));
} }
} }

View File

@ -41,7 +41,6 @@ public class RestCreateIndexAction extends BaseRestHandler {
controller.registerHandler(RestRequest.Method.PUT, "/{index}", this); controller.registerHandler(RestRequest.Method.PUT, "/{index}", this);
} }
@SuppressWarnings({"unchecked"})
@Override @Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index"));

View File

@ -67,7 +67,7 @@ public class RestIndexPutAliasAction extends BaseRestHandler {
String searchRouting = null; String searchRouting = null;
if (request.hasContent()) { if (request.hasContent()) {
try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) { try (XContentParser parser = request.contentParser()) {
XContentParser.Token token = parser.nextToken(); XContentParser.Token token = parser.nextToken();
if (token == null) { if (token == null) {
throw new IllegalArgumentException("No index alias is specified"); throw new IllegalArgumentException("No index alias is specified");

View File

@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.indices;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
@ -29,7 +28,6 @@ import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestController;
@ -61,7 +59,7 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout())); indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout()));
indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout())); indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout()));
try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) { try (XContentParser parser = request.contentParser()) {
PARSER.parse(parser, indicesAliasesRequest, () -> ParseFieldMatcher.STRICT); PARSER.parse(parser, indicesAliasesRequest, () -> ParseFieldMatcher.STRICT);
} }
if (indicesAliasesRequest.getAliasActions().isEmpty()) { if (indicesAliasesRequest.getAliasActions().isEmpty()) {

View File

@ -22,6 +22,7 @@ package org.elasticsearch.rest.action.admin.indices;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
@ -40,13 +41,10 @@ public class RestRolloverIndexAction extends BaseRestHandler {
controller.registerHandler(RestRequest.Method.POST, "/{index}/_rollover/{new_index}", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_rollover/{new_index}", this);
} }
@SuppressWarnings({"unchecked"})
@Override @Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index")); RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index"));
if (request.hasContent()) { request.applyContentParser(parser -> RolloverRequest.PARSER.parse(parser, rolloverIndexRequest, () -> ParseFieldMatcher.EMPTY));
rolloverIndexRequest.source(request.content());
}
rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false)); rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false));
rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout()));
rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout()));

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.indices.shrink.ShrinkRequest;
import org.elasticsearch.action.admin.indices.shrink.ShrinkResponse; import org.elasticsearch.action.admin.indices.shrink.ShrinkResponse;
import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -42,7 +43,6 @@ public class RestShrinkIndexAction extends BaseRestHandler {
controller.registerHandler(RestRequest.Method.POST, "/{index}/_shrink/{target}", this); controller.registerHandler(RestRequest.Method.POST, "/{index}/_shrink/{target}", this);
} }
@SuppressWarnings({"unchecked"})
@Override @Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
if (request.param("target") == null) { if (request.param("target") == null) {
@ -52,9 +52,7 @@ public class RestShrinkIndexAction extends BaseRestHandler {
throw new IllegalArgumentException("no source index"); throw new IllegalArgumentException("no source index");
} }
ShrinkRequest shrinkIndexRequest = new ShrinkRequest(request.param("target"), request.param("index")); ShrinkRequest shrinkIndexRequest = new ShrinkRequest(request.param("target"), request.param("index"));
if (request.hasContent()) { request.applyContentParser(parser -> ShrinkRequest.PARSER.parse(parser, shrinkIndexRequest, () -> ParseFieldMatcher.EMPTY));
shrinkIndexRequest.source(request.content());
}
shrinkIndexRequest.timeout(request.paramAsTime("timeout", shrinkIndexRequest.timeout())); shrinkIndexRequest.timeout(request.paramAsTime("timeout", shrinkIndexRequest.timeout()));
shrinkIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", shrinkIndexRequest.masterNodeTimeout())); shrinkIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", shrinkIndexRequest.masterNodeTimeout()));
shrinkIndexRequest.setWaitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); shrinkIndexRequest.setWaitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards")));

View File

@ -80,9 +80,8 @@ public class RestUpdateAction extends BaseRestHandler {
updateRequest.versionType(VersionType.fromString(request.param("version_type"), updateRequest.versionType())); updateRequest.versionType(VersionType.fromString(request.param("version_type"), updateRequest.versionType()));
// see if we have it in the body request.applyContentParser(parser -> {
if (request.hasContent()) { updateRequest.fromXContent(parser);
updateRequest.fromXContent(request.content());
IndexRequest upsertRequest = updateRequest.upsertRequest(); IndexRequest upsertRequest = updateRequest.upsertRequest();
if (upsertRequest != null) { if (upsertRequest != null) {
upsertRequest.routing(request.param("routing")); upsertRequest.routing(request.param("routing"));
@ -97,7 +96,7 @@ public class RestUpdateAction extends BaseRestHandler {
doc.version(RestActions.parseVersion(request)); doc.version(RestActions.parseVersion(request));
doc.versionType(VersionType.fromString(request.param("version_type"), doc.versionType())); doc.versionType(VersionType.fromString(request.param("version_type"), doc.versionType()));
} }
} });
return channel -> return channel ->
client.update(updateRequest, new RestStatusToXContentListener<>(channel, r -> r.getLocation(updateRequest.routing()))); client.update(updateRequest, new RestStatusToXContentListener<>(channel, r -> r.getLocation(updateRequest.routing())));

View File

@ -56,10 +56,14 @@ public class RestSuggestAction extends BaseRestHandler {
SearchRequestParsers searchRequestParsers) { SearchRequestParsers searchRequestParsers) {
super(settings); super(settings);
this.searchRequestParsers = searchRequestParsers; this.searchRequestParsers = searchRequestParsers;
controller.registerHandler(POST, "/_suggest", this); controller.registerAsDeprecatedHandler(POST, "/_suggest", this,
controller.registerHandler(GET, "/_suggest", this); "[POST /_suggest] is deprecated! Use [POST /_search] instead.", deprecationLogger);
controller.registerHandler(POST, "/{index}/_suggest", this); controller.registerAsDeprecatedHandler(GET, "/_suggest", this,
controller.registerHandler(GET, "/{index}/_suggest", this); "[GET /_suggest] is deprecated! Use [GET /_search] instead.", deprecationLogger);
controller.registerAsDeprecatedHandler(POST, "/{index}/_suggest", this,
"[POST /{index}/_suggest] is deprecated! Use [POST /{index}/_search] instead.", deprecationLogger);
controller.registerAsDeprecatedHandler(GET, "/{index}/_suggest", this,
"[GET /{index}/_suggest] is deprecated! Use [GET /{index}/_search] instead.", deprecationLogger);
} }
@Override @Override
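registerAsDeprecatedHandler wires a route exactly like registerHandler but logs a deprecation warning through the supplied logger each time the route is hit. A one-line sketch with illustrative paths (not from this diff):

controller.registerAsDeprecatedHandler(GET, "/_old", this,
    "[GET /_old] is deprecated! Use [GET /_new] instead.", deprecationLogger);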

View File

@ -20,8 +20,6 @@ package org.elasticsearch.transport;
import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet; import com.carrotsearch.hppc.IntSet;
import com.carrotsearch.hppc.LongObjectHashMap;
import com.carrotsearch.hppc.LongObjectMap;
import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier; import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
@ -88,7 +86,9 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
@ -180,7 +180,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
private final String transportName; private final String transportName;
protected final ConnectionProfile defaultConnectionProfile; protected final ConnectionProfile defaultConnectionProfile;
private final LongObjectMap<TransportResponseHandler<?>> pendingHandshakes = new LongObjectHashMap<>(); private final ConcurrentMap<Long, HandshakeResponseHandler> pendingHandshakes = new ConcurrentHashMap<>();
private final AtomicLong requestIdGenerator = new AtomicLong(); private final AtomicLong requestIdGenerator = new AtomicLong();
private final CounterMetric numHandshakes = new CounterMetric(); private final CounterMetric numHandshakes = new CounterMetric();
private static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake"; private static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake";
@ -242,6 +242,51 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
this.transportServiceAdapter = service; this.transportServiceAdapter = service;
} }
private static class HandshakeResponseHandler<Channel> implements TransportResponseHandler<VersionHandshakeResponse> {
final AtomicReference<Version> versionRef = new AtomicReference<>();
final CountDownLatch latch = new CountDownLatch(1);
final AtomicBoolean handshakeNotSupported = new AtomicBoolean(false);
final AtomicReference<Exception> exceptionRef = new AtomicReference<>();
final Channel channel;
public HandshakeResponseHandler(Channel channel) {
this.channel = channel;
}
@Override
public VersionHandshakeResponse newInstance() {
return new VersionHandshakeResponse();
}
@Override
public void handleResponse(VersionHandshakeResponse response) {
final boolean success = versionRef.compareAndSet(null, response.version);
assert success;
latch.countDown();
}
@Override
public void handleException(TransportException exp) {
Throwable cause = exp.getCause();
if (cause != null
&& cause instanceof ActionNotFoundTransportException
// this will happen if we talk to a node (pre 5.2) that doesn't have a handshake handler
// we will just treat the node as a 5.0.0 node unless the discovery node that is used to connect has a higher version.
&& cause.getMessage().equals("No handler for action [internal:tcp/handshake]")) {
handshakeNotSupported.set(true);
} else {
final boolean success = exceptionRef.compareAndSet(null, exp);
assert success;
}
latch.countDown();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
public class ScheduledPing extends AbstractLifecycleRunnable { public class ScheduledPing extends AbstractLifecycleRunnable {
/** /**
@ -462,9 +507,17 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
@Override @Override
public final NodeChannels openConnection(DiscoveryNode node, ConnectionProfile profile) throws IOException { public final NodeChannels openConnection(DiscoveryNode node, ConnectionProfile profile) throws IOException {
NodeChannels nodeChannels = connectToChannels(node, profile); try {
transportServiceAdapter.onConnectionOpened(node); NodeChannels nodeChannels = connectToChannels(node, profile);
return nodeChannels; transportServiceAdapter.onConnectionOpened(node);
return nodeChannels;
} catch (ConnectTransportException e) {
throw e;
} catch (Exception e) {
// ConnectTransportExceptions are handled specifically on the caller end - we wrap the actual exception to ensure
// only relevant exceptions are logged on the caller end. This is the same as in connectToNode
throw new ConnectTransportException(node, "general node connection failure", e);
}
} }
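The try/catch above narrows every connect failure to ConnectTransportException. A sketch of the caller-side contract this buys (transport, node and profile are assumed to be in scope):

try {
    transport.openConnection(node, profile);
} catch (ConnectTransportException e) {
    // the single expected failure type; e.getCause() carries the underlying error
}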
/** /**
@ -1466,47 +1519,12 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
// pkg private for testing // pkg private for testing
final Version executeHandshake(DiscoveryNode node, Channel channel, TimeValue timeout) throws IOException, InterruptedException { final Version executeHandshake(DiscoveryNode node, Channel channel, TimeValue timeout) throws IOException, InterruptedException {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Version> versionRef = new AtomicReference<>();
AtomicReference<Exception> exceptionRef = new AtomicReference<>();
AtomicBoolean handshakeNotSupported = new AtomicBoolean(false);
numHandshakes.inc(); numHandshakes.inc();
final long requestId = newRequestId(); final long requestId = newRequestId();
pendingHandshakes.put(requestId, new TransportResponseHandler<VersionHandshakeResponse>() { final HandshakeResponseHandler handler = new HandshakeResponseHandler(channel);
AtomicReference<Version> versionRef = handler.versionRef;
@Override AtomicReference<Exception> exceptionRef = handler.exceptionRef;
public VersionHandshakeResponse newInstance() { pendingHandshakes.put(requestId, handler);
return new VersionHandshakeResponse();
}
@Override
public void handleResponse(VersionHandshakeResponse response) {
final boolean success = versionRef.compareAndSet(null, response.version);
assert success;
latch.countDown();
}
@Override
public void handleException(TransportException exp) {
Throwable cause = exp.getCause();
if (cause != null
&& cause instanceof ActionNotFoundTransportException
// this will happen if we talk to a node (pre 5.2) that doesn't have a handshake handler
// we will just treat the node as a 5.0.0 node unless the discovery node that is used to connect has a higher version.
&& cause.getMessage().equals("No handler for action [internal:tcp/handshake]")) {
handshakeNotSupported.set(true);
} else {
final boolean success = exceptionRef.compareAndSet(null, exp);
assert success;
}
latch.countDown();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
});
boolean success = false; boolean success = false;
try { try {
// for the request we use the minCompatVersion since we don't know the version of the node we talk to
@ -1515,11 +1533,11 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
final Version minCompatVersion = getCurrentVersion().minimumCompatibilityVersion(); final Version minCompatVersion = getCurrentVersion().minimumCompatibilityVersion();
sendRequestToChannel(node, channel, requestId, HANDSHAKE_ACTION_NAME, TransportRequest.Empty.INSTANCE, sendRequestToChannel(node, channel, requestId, HANDSHAKE_ACTION_NAME, TransportRequest.Empty.INSTANCE,
TransportRequestOptions.EMPTY, minCompatVersion, TransportStatus.setHandshake((byte)0)); TransportRequestOptions.EMPTY, minCompatVersion, TransportStatus.setHandshake((byte)0));
if (latch.await(timeout.millis(), TimeUnit.MILLISECONDS) == false) { if (handler.latch.await(timeout.millis(), TimeUnit.MILLISECONDS) == false) {
throw new ConnectTransportException(node, "handshake_timeout[" + timeout + "]"); throw new ConnectTransportException(node, "handshake_timeout[" + timeout + "]");
} }
success = true; success = true;
if (handshakeNotSupported.get()) { if (handler.handshakeNotSupported.get()) {
// this is a BWC layer, if we talk to a pre 5.2 node then the handshake is not supported // this is a BWC layer, if we talk to a pre 5.2 node then the handshake is not supported
// this will go away in master once it's all ported to 5.2 but for now we keep this to make // this will go away in master once it's all ported to 5.2 but for now we keep this to make
// the backport straightforward // the backport straightforward
@ -1555,4 +1573,18 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
public long newRequestId() { public long newRequestId() {
return requestIdGenerator.incrementAndGet(); return requestIdGenerator.incrementAndGet();
} }
/**
* Called by sub-classes for each channel that is closed
*/
protected final void onChannelClosed(Channel channel) {
Optional<Map.Entry<Long, HandshakeResponseHandler>> first = pendingHandshakes.entrySet().stream()
.filter((entry) -> entry.getValue().channel == channel).findFirst();
if (first.isPresent()) {
final Long requestId = first.get().getKey();
HandshakeResponseHandler handler = first.get().getValue();
pendingHandshakes.remove(requestId);
handler.handleException(new TransportException("connection reset"));
}
}
} }
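Keying pendingHandshakes by request id in a ConcurrentHashMap is what makes this scan-and-remove safe from a close callback. A hypothetical subclass hook (the hook name is illustrative, not from this diff):

// called from a concrete transport's close callback (hook name illustrative)
protected void onChannelDisconnected(Channel channel) {
    onChannelClosed(channel);   // fails the pending handshake with "connection reset"
}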

View File

@ -0,0 +1,264 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.index.shard.IndexShardRecoveringException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matcher;
import java.io.IOException;
import java.util.Collections;
import static java.util.Collections.singleton;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.Matchers.hasSize;
public class ElasticsearchExceptionTests extends ESTestCase {
public void testToXContent() throws IOException {
ElasticsearchException e = new ElasticsearchException("test");
assertExceptionAsJson(e, false, equalTo("{\"type\":\"exception\",\"reason\":\"test\"}"));
e = new IndexShardRecoveringException(new ShardId("_test", "_0", 5));
assertExceptionAsJson(e, false, equalTo("{\"type\":\"index_shard_recovering_exception\"," +
"\"reason\":\"CurrentState[RECOVERING] Already recovering\",\"index_uuid\":\"_0\",\"shard\":\"5\",\"index\":\"_test\"}"));
e = new BroadcastShardOperationFailedException(new ShardId("_index", "_uuid", 12), "foo", new IllegalStateException("bar"));
assertExceptionAsJson(e, false, equalTo("{\"type\":\"illegal_state_exception\",\"reason\":\"bar\"}"));
e = new ElasticsearchException(new IllegalArgumentException("foo"));
assertExceptionAsJson(e, false, equalTo("{\"type\":\"exception\",\"reason\":\"java.lang.IllegalArgumentException: foo\"," +
"\"caused_by\":{\"type\":\"illegal_argument_exception\",\"reason\":\"foo\"}}"));
e = new ElasticsearchException("foo", new IllegalStateException("bar"));
assertExceptionAsJson(e, false, equalTo("{\"type\":\"exception\",\"reason\":\"foo\"," +
"\"caused_by\":{\"type\":\"illegal_state_exception\",\"reason\":\"bar\"}}"));
// Test the same exception but with the "rest.exception.stacktrace.skip" parameter disabled: the stack_trace must be present
// in the JSON. Since the stack can be large, it only checks the beginning of the JSON.
assertExceptionAsJson(e, true, startsWith("{\"type\":\"exception\",\"reason\":\"foo\"," +
"\"caused_by\":{\"type\":\"illegal_state_exception\",\"reason\":\"bar\"," +
"\"stack_trace\":\"java.lang.IllegalStateException: bar"));
}
public void testToXContentWithHeaders() throws IOException {
ElasticsearchException e = new ElasticsearchException("foo",
new ElasticsearchException("bar",
new ElasticsearchException("baz",
new ClusterBlockException(singleton(DiscoverySettings.NO_MASTER_BLOCK_WRITES)))));
e.addHeader("foo_0", "0");
e.addHeader("foo_1", "1");
e.addHeader("es.header_foo_0", "foo_0");
e.addHeader("es.header_foo_1", "foo_1");
final String expectedJson = "{"
+ "\"type\":\"exception\","
+ "\"reason\":\"foo\","
+ "\"header_foo_0\":\"foo_0\","
+ "\"header_foo_1\":\"foo_1\","
+ "\"caused_by\":{"
+ "\"type\":\"exception\","
+ "\"reason\":\"bar\","
+ "\"caused_by\":{"
+ "\"type\":\"exception\","
+ "\"reason\":\"baz\","
+ "\"caused_by\":{"
+ "\"type\":\"cluster_block_exception\","
+ "\"reason\":\"blocked by: [SERVICE_UNAVAILABLE/2/no master];\""
+ "}"
+ "}"
+ "},"
+ "\"header\":{"
+ "\"foo_0\":\"0\","
+ "\"foo_1\":\"1\""
+ "}"
+ "}";
assertExceptionAsJson(e, false, equalTo(expectedJson));
ElasticsearchException parsed;
try (XContentParser parser = XContentType.JSON.xContent().createParser(expectedJson)) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
parsed = ElasticsearchException.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
assertNotNull(parsed);
assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]");
assertThat(parsed.getHeaderKeys(), hasSize(4));
assertEquals(parsed.getHeader("header_foo_0").get(0), "foo_0");
assertEquals(parsed.getHeader("header_foo_1").get(0), "foo_1");
assertEquals(parsed.getHeader("foo_0").get(0), "0");
assertEquals(parsed.getHeader("foo_1").get(0), "1");
ElasticsearchException cause = (ElasticsearchException) parsed.getCause();
assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=bar]");
cause = (ElasticsearchException) cause.getCause();
assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=baz]");
cause = (ElasticsearchException) cause.getCause();
assertEquals(cause.getMessage(),
"Elasticsearch exception [type=cluster_block_exception, reason=blocked by: [SERVICE_UNAVAILABLE/2/no master];]");
}
public void testFromXContent() throws IOException {
final XContent xContent = randomFrom(XContentType.values()).xContent();
XContentBuilder builder = XContentBuilder.builder(xContent)
.startObject()
.field("type", "foo")
.field("reason", "something went wrong")
.field("stack_trace", "...")
.endObject();
ElasticsearchException parsed;
try (XContentParser parser = xContent.createParser(builder.bytes())) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
parsed = ElasticsearchException.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
assertNotNull(parsed);
assertEquals(parsed.getMessage(), "Elasticsearch exception [type=foo, reason=something went wrong, stack_trace=...]");
}
public void testFromXContentWithCause() throws IOException {
ElasticsearchException e = new ElasticsearchException("foo",
new ElasticsearchException("bar",
new ElasticsearchException("baz",
new RoutingMissingException("_test", "_type", "_id"))));
final XContent xContent = randomFrom(XContentType.values()).xContent();
XContentBuilder builder = XContentBuilder.builder(xContent).startObject().value(e).endObject();
ElasticsearchException parsed;
try (XContentParser parser = xContent.createParser(builder.bytes())) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
parsed = ElasticsearchException.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
assertNotNull(parsed);
assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]");
ElasticsearchException cause = (ElasticsearchException) parsed.getCause();
assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=bar]");
cause = (ElasticsearchException) cause.getCause();
assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=baz]");
cause = (ElasticsearchException) cause.getCause();
assertEquals(cause.getMessage(),
"Elasticsearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_type]/[_id]]");
assertThat(cause.getHeaderKeys(), hasSize(2));
assertThat(cause.getHeader("index"), hasItem("_test"));
assertThat(cause.getHeader("index_uuid"), hasItem("_na_"));
}
public void testFromXContentWithHeaders() throws IOException {
RoutingMissingException routing = new RoutingMissingException("_test", "_type", "_id");
ElasticsearchException baz = new ElasticsearchException("baz", routing);
baz.addHeader("baz_0", "baz0");
baz.addHeader("es.baz_1", "baz1");
baz.addHeader("baz_2", "baz2");
baz.addHeader("es.baz_3", "baz3");
ElasticsearchException bar = new ElasticsearchException("bar", baz);
bar.addHeader("es.bar_0", "bar0");
bar.addHeader("bar_1", "bar1");
bar.addHeader("es.bar_2", "bar2");
ElasticsearchException foo = new ElasticsearchException("foo", bar);
foo.addHeader("es.foo_0", "foo0");
foo.addHeader("foo_1", "foo1");
final XContent xContent = randomFrom(XContentType.values()).xContent();
XContentBuilder builder = XContentBuilder.builder(xContent).startObject().value(foo).endObject();
ElasticsearchException parsed;
try (XContentParser parser = xContent.createParser(builder.bytes())) {
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
parsed = ElasticsearchException.fromXContent(parser);
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertNull(parser.nextToken());
}
assertNotNull(parsed);
assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]");
assertThat(parsed.getHeaderKeys(), hasSize(2));
assertThat(parsed.getHeader("foo_0"), hasItem("foo0"));
assertThat(parsed.getHeader("foo_1"), hasItem("foo1"));
ElasticsearchException cause = (ElasticsearchException) parsed.getCause();
assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=bar]");
assertThat(cause.getHeaderKeys(), hasSize(3));
assertThat(cause.getHeader("bar_0"), hasItem("bar0"));
assertThat(cause.getHeader("bar_1"), hasItem("bar1"));
assertThat(cause.getHeader("bar_2"), hasItem("bar2"));
cause = (ElasticsearchException) cause.getCause();
assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=baz]");
assertThat(cause.getHeaderKeys(), hasSize(4));
assertThat(cause.getHeader("baz_0"), hasItem("baz0"));
assertThat(cause.getHeader("baz_1"), hasItem("baz1"));
assertThat(cause.getHeader("baz_2"), hasItem("baz2"));
assertThat(cause.getHeader("baz_3"), hasItem("baz3"));
cause = (ElasticsearchException) cause.getCause();
assertEquals(cause.getMessage(),
"Elasticsearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_type]/[_id]]");
assertThat(cause.getHeaderKeys(), hasSize(2));
assertThat(cause.getHeader("index"), hasItem("_test"));
assertThat(cause.getHeader("index_uuid"), hasItem("_na_"));
}
/**
* Builds a {@link ToXContent} using a JSON XContentBuilder and checks the resulting string with the given {@link Matcher}.
*
* By default, the stack trace of the exception is not rendered. The parameter `errorTrace` forces the stack trace to
* be rendered like the REST API does when the "error_trace" parameter is set to true.
*/
private static void assertExceptionAsJson(ElasticsearchException e, boolean errorTrace, Matcher<String> expected)
throws IOException {
ToXContent.Params params = ToXContent.EMPTY_PARAMS;
if (errorTrace) {
params = new ToXContent.MapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"));
}
try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
builder.startObject();
e.toXContent(builder, params);
builder.endObject();
assertThat(builder.bytes().utf8ToString(), expected);
}
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.exists;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.discovery.zen.ElectMasterService;
import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@ -36,7 +37,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThro
public class IndicesExistsIT extends ESIntegTestCase { public class IndicesExistsIT extends ESIntegTestCase {
public void testIndexExistsWithBlocksInPlace() throws IOException { public void testIndexExistsWithBlocksInPlace() throws IOException {
Settings settings = Settings.builder().put(GatewayService.RECOVER_AFTER_NODES_SETTING.getKey(), 99).build(); Settings settings = Settings.builder()
.put(GatewayService.RECOVER_AFTER_NODES_SETTING.getKey(), 99)
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1).build();
String node = internalCluster().startNode(settings); String node = internalCluster().startNode(settings);
assertThrows(client(node).admin().indices().prepareExists("test").setMasterNodeTimeout(TimeValue.timeValueSeconds(0)), assertThrows(client(node).admin().indices().prepareExists("test").setMasterNodeTimeout(TimeValue.timeValueSeconds(0)),

View File

@ -19,9 +19,11 @@
package org.elasticsearch.action.admin.indices.rollover; package org.elasticsearch.action.admin.indices.rollover;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.util.Set; import java.util.Set;
@ -39,7 +41,7 @@ public class RolloverRequestTests extends ESTestCase {
.field("max_docs", 100) .field("max_docs", 100)
.endObject() .endObject()
.endObject(); .endObject();
request.source(builder.bytes()); RolloverRequest.PARSER.parse(XContentHelper.createParser(builder.bytes()), request, () -> ParseFieldMatcher.EMPTY);
Set<Condition> conditions = request.getConditions(); Set<Condition> conditions = request.getConditions();
assertThat(conditions.size(), equalTo(2)); assertThat(conditions.size(), equalTo(2));
for (Condition condition : conditions) { for (Condition condition : conditions) {
@ -80,7 +82,7 @@ public class RolloverRequestTests extends ESTestCase {
.startObject("alias1").endObject() .startObject("alias1").endObject()
.endObject() .endObject()
.endObject(); .endObject();
request.source(builder.bytes()); RolloverRequest.PARSER.parse(XContentHelper.createParser(builder.bytes()), request, () -> ParseFieldMatcher.EMPTY);
Set<Condition> conditions = request.getConditions(); Set<Condition> conditions = request.getConditions();
assertThat(conditions.size(), equalTo(2)); assertThat(conditions.size(), equalTo(2));
assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1)); assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1));

View File

@ -20,7 +20,7 @@
package org.elasticsearch.action.fieldstats; package org.elasticsearch.action.fieldstats;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.StreamsUtils;
@ -39,7 +39,7 @@ public class FieldStatsRequestTests extends ESTestCase {
StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/" + StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/" +
"fieldstats-index-constraints-request.json")); "fieldstats-index-constraints-request.json"));
FieldStatsRequest request = new FieldStatsRequest(); FieldStatsRequest request = new FieldStatsRequest();
request.source(XContentFactory.xContent(data).createParser(data)); request.source(createParser(JsonXContent.jsonXContent, data));
assertThat(request.getFields().length, equalTo(5)); assertThat(request.getFields().length, equalTo(5));
assertThat(request.getFields()[0], equalTo("field1")); assertThat(request.getFields()[0], equalTo("field1"));

View File

@ -19,10 +19,21 @@
package org.elasticsearch.action.support.replication; package org.elasticsearch.action.support.replication;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.IndexShardRecoveringException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.function.Supplier;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
public class ReplicationResponseTests extends ESTestCase { public class ReplicationResponseTests extends ESTestCase {
@ -36,4 +47,97 @@ public class ReplicationResponseTests extends ESTestCase {
equalTo(String.format(Locale.ROOT, "ShardInfo{total=5, successful=%d, failures=[]}", successful))); equalTo(String.format(Locale.ROOT, "ShardInfo{total=5, successful=%d, failures=[]}", successful)));
} }
public void testShardInfoEqualsAndHashcode() {
EqualsHashCodeTestUtils.CopyFunction<ReplicationResponse.ShardInfo> copy = shardInfo ->
new ReplicationResponse.ShardInfo(shardInfo.getTotal(), shardInfo.getSuccessful(), shardInfo.getFailures());
EqualsHashCodeTestUtils.MutateFunction<ReplicationResponse.ShardInfo> mutate = shardInfo -> {
List<Supplier<ReplicationResponse.ShardInfo>> mutations = new ArrayList<>();
mutations.add(() ->
new ReplicationResponse.ShardInfo(shardInfo.getTotal() + 1, shardInfo.getSuccessful(), shardInfo.getFailures()));
mutations.add(() ->
new ReplicationResponse.ShardInfo(shardInfo.getTotal(), shardInfo.getSuccessful() + 1, shardInfo.getFailures()));
mutations.add(() -> {
int nbFailures = randomIntBetween(1, 5);
return new ReplicationResponse.ShardInfo(shardInfo.getTotal(), shardInfo.getSuccessful(), randomFailures(nbFailures));
});
return randomFrom(mutations).get();
};
checkEqualsAndHashCode(randomShardInfo(), copy, mutate);
}
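checkEqualsAndHashCode takes an original value, a copy function that must produce an equal instance with the same hashCode, and a mutate function that must produce an unequal one. A toy illustration of the contract (Point is hypothetical):

static final class Point {
    final int x, y;
    Point(int x, int y) { this.x = x; this.y = y; }
    @Override public boolean equals(Object o) {
        return o instanceof Point && ((Point) o).x == x && ((Point) o).y == y;
    }
    @Override public int hashCode() { return 31 * x + y; }
}

public void testPointEqualsAndHashcode() {
    checkEqualsAndHashCode(new Point(1, 2),
        p -> new Point(p.x, p.y),          // copy: equal, same hashCode
        p -> new Point(p.x + 1, p.y));     // mutate: must not be equal
}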
public void testFailureEqualsAndHashcode() {
EqualsHashCodeTestUtils.CopyFunction<ReplicationResponse.ShardInfo.Failure> copy = failure -> {
Index index = failure.fullShardId().getIndex();
ShardId shardId = new ShardId(index.getName(), index.getUUID(), failure.shardId());
Exception cause = (Exception) failure.getCause();
return new ReplicationResponse.ShardInfo.Failure(shardId, failure.nodeId(), cause, failure.status(), failure.primary());
};
EqualsHashCodeTestUtils.MutateFunction<ReplicationResponse.ShardInfo.Failure> mutate = failure -> {
List<Supplier<ReplicationResponse.ShardInfo.Failure>> mutations = new ArrayList<>();
final Index index = failure.fullShardId().getIndex();
final ShardId randomIndex = new ShardId(randomUnicodeOfCodepointLength(5), index.getUUID(), failure.shardId());
mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(randomIndex, failure.nodeId(), (Exception) failure.getCause(),
failure.status(), failure.primary()));
final ShardId randomUUID = new ShardId(index.getName(), randomUnicodeOfCodepointLength(5), failure.shardId());
mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(randomUUID, failure.nodeId(), (Exception) failure.getCause(),
failure.status(), failure.primary()));
final ShardId randomShardId = new ShardId(index.getName(), index.getUUID(), failure.shardId() + randomIntBetween(1, 3));
mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(randomShardId, failure.nodeId(), (Exception) failure.getCause(),
failure.status(), failure.primary()));
final String randomNode = randomUnicodeOfLength(3);
mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), randomNode, (Exception) failure.getCause(),
failure.status(), failure.primary()));
final Exception randomException = randomFrom(new IllegalStateException("a"), new IllegalArgumentException("b"));
mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), failure.nodeId(), randomException,
failure.status(), failure.primary()));
final RestStatus randomStatus = randomFrom(RestStatus.values());
mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), failure.nodeId(),
(Exception) failure.getCause(), randomStatus, failure.primary()));
final boolean randomPrimary = !failure.primary();
mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), failure.nodeId(),
(Exception) failure.getCause(), failure.status(), randomPrimary));
return randomFrom(mutations).get();
};
checkEqualsAndHashCode(randomFailure(), copy, mutate);
}
private static ReplicationResponse.ShardInfo randomShardInfo() {
int total = randomIntBetween(1, 10);
int successful = randomIntBetween(0, total);
return new ReplicationResponse.ShardInfo(total, successful, randomFailures(Math.max(0, (total - successful))));
}
private static ReplicationResponse.ShardInfo.Failure[] randomFailures(int nbFailures) {
List<ReplicationResponse.ShardInfo.Failure> randomFailures = new ArrayList<>(nbFailures);
for (int i = 0; i < nbFailures; i++) {
randomFailures.add(randomFailure());
}
return randomFailures.toArray(new ReplicationResponse.ShardInfo.Failure[nbFailures]);
}
private static ReplicationResponse.ShardInfo.Failure randomFailure() {
return new ReplicationResponse.ShardInfo.Failure(
new ShardId(randomAsciiOfLength(5), randomAsciiOfLength(5), randomIntBetween(0, 5)),
randomAsciiOfLength(3),
randomFrom(
new IndexShardRecoveringException(new ShardId("_test", "_0", 5)),
new ElasticsearchException(new IllegalArgumentException("argument is wrong")),
new RoutingMissingException("_test", "_type", "_id")
),
randomFrom(RestStatus.values()),
randomBoolean()
);
}
} }

View File

@ -19,13 +19,11 @@
package org.elasticsearch.action.update; package org.elasticsearch.action.update;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
@ -36,8 +34,8 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.watcher.ResourceWatcherService;
@ -52,17 +50,16 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class UpdateRequestTests extends ESTestCase { public class UpdateRequestTests extends ESTestCase {
public void testUpdateRequest() throws Exception { public void testUpdateRequest() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type", "1"); UpdateRequest request = new UpdateRequest("test", "type", "1");
// simple script // simple script
request.fromXContent(XContentFactory.jsonBuilder().startObject() request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
.field("script", "script1") .startObject()
.endObject()); .field("script", "script1")
.endObject().bytes()));
Script script = request.script(); Script script = request.script();
assertThat(script, notNullValue()); assertThat(script, notNullValue());
assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getIdOrCode(), equalTo("script1"));
@ -72,9 +69,9 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(params, equalTo(Collections.emptyMap())); assertThat(params, equalTo(Collections.emptyMap()));
// simple verbose script // simple verbose script
request.fromXContent(XContentFactory.jsonBuilder().startObject() request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
.startObject("script").field("inline", "script1").endObject() .startObject("script").field("inline", "script1").endObject()
.endObject()); .endObject().bytes()));
script = request.script(); script = request.script();
assertThat(script, notNullValue()); assertThat(script, notNullValue());
assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getIdOrCode(), equalTo("script1"));
@ -85,13 +82,13 @@ public class UpdateRequestTests extends ESTestCase {
// script with params // script with params
request = new UpdateRequest("test", "type", "1"); request = new UpdateRequest("test", "type", "1");
request.fromXContent(XContentFactory.jsonBuilder().startObject() request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
.startObject("script") .startObject("script")
.field("inline", "script1") .field("inline", "script1")
.startObject("params") .startObject("params")
.field("param1", "value1") .field("param1", "value1")
.endObject() .endObject()
.endObject().endObject()); .endObject().endObject().bytes()));
script = request.script(); script = request.script();
assertThat(script, notNullValue()); assertThat(script, notNullValue());
assertThat(script.getIdOrCode(), equalTo("script1")); assertThat(script.getIdOrCode(), equalTo("script1"));
@ -103,9 +100,15 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(params.get("param1").toString(), equalTo("value1")); assertThat(params.get("param1").toString(), equalTo("value1"));
request = new UpdateRequest("test", "type", "1"); request = new UpdateRequest("test", "type", "1");
request.fromXContent(XContentFactory.jsonBuilder().startObject().startObject("script") request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
.startObject("params").field("param1", "value1").endObject() .startObject()
.field("inline", "script1").endObject().endObject()); .startObject("script")
.startObject("params")
.field("param1", "value1")
.endObject()
.field("inline", "script1")
.endObject()
.endObject().bytes()));
script = request.script(); script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -118,7 +121,7 @@ public class UpdateRequestTests extends ESTestCase {
         // script with params and upsert
         request = new UpdateRequest("test", "type", "1");
-        request.fromXContent(XContentFactory.jsonBuilder().startObject()
+        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
                 .startObject("script")
                     .startObject("params")
                         .field("param1", "value1")
@@ -130,7 +133,7 @@ public class UpdateRequestTests extends ESTestCase {
                     .startObject("compound")
                         .field("field2", "value2")
                     .endObject()
-                .endObject().endObject());
+                .endObject().endObject().bytes()));
         script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -145,7 +148,7 @@ public class UpdateRequestTests extends ESTestCase {
         assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));

         request = new UpdateRequest("test", "type", "1");
-        request.fromXContent(XContentFactory.jsonBuilder().startObject()
+        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
                 .startObject("upsert")
                     .field("field1", "value1")
                     .startObject("compound")
@@ -157,7 +160,7 @@ public class UpdateRequestTests extends ESTestCase {
                         .field("param1", "value1")
                     .endObject()
                     .field("inline", "script1")
-                .endObject().endObject());
+                .endObject().endObject().bytes()));
         script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -173,69 +176,70 @@ public class UpdateRequestTests extends ESTestCase {
         // script with doc
         request = new UpdateRequest("test", "type", "1");
-        request.fromXContent(XContentFactory.jsonBuilder().startObject()
-                .startObject("doc").field("field1", "value1").startObject("compound")
-                .field("field2", "value2").endObject().endObject().endObject());
+        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
+            .startObject()
+                .startObject("doc")
+                    .field("field1", "value1")
+                    .startObject("compound")
+                        .field("field2", "value2")
+                    .endObject()
+                .endObject()
+            .endObject().bytes()));
         Map<String, Object> doc = request.doc().sourceAsMap();
         assertThat(doc.get("field1").toString(), equalTo("value1"));
         assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
     }

-    // Related to issue #15822
-    public void testInvalidBodyThrowsParseException() throws Exception {
-        UpdateRequest request = new UpdateRequest("test", "type", "1");
-        Exception e = expectThrows(ElasticsearchParseException.class, () -> request.fromXContent(new byte[] { (byte) '"' }));
-        assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
-    }
-
     // Related to issue 15338
     public void testFieldsParsing() throws Exception {
         UpdateRequest request = new UpdateRequest("test", "type1", "1")
-                .fromXContent(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}"));
+                .fromXContent(XContentHelper.createParser(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")));
         assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1"));
         assertThat(request.fields(), arrayContaining("_source"));

-        request = new UpdateRequest("test", "type2", "2")
-                .fromXContent(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}"));
+        request = new UpdateRequest("test", "type2", "2").fromXContent(
+                XContentHelper.createParser(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")));
         assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2"));
         assertThat(request.fields(), arrayContaining("field1", "field2"));
     }

     public void testFetchSourceParsing() throws Exception {
         UpdateRequest request = new UpdateRequest("test", "type1", "1");
-        request.fromXContent(
-                XContentFactory.jsonBuilder().startObject().field("_source", true).endObject()
-        );
+        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
+            .startObject()
+                .field("_source", true)
+            .endObject().bytes()));
         assertThat(request.fetchSource(), notNullValue());
         assertThat(request.fetchSource().includes().length, equalTo(0));
         assertThat(request.fetchSource().excludes().length, equalTo(0));
         assertThat(request.fetchSource().fetchSource(), equalTo(true));

-        request.fromXContent(
-                XContentFactory.jsonBuilder().startObject().field("_source", false).endObject()
-        );
+        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
+            .startObject()
+                .field("_source", false)
+            .endObject().bytes()));
         assertThat(request.fetchSource(), notNullValue());
         assertThat(request.fetchSource().includes().length, equalTo(0));
         assertThat(request.fetchSource().excludes().length, equalTo(0));
         assertThat(request.fetchSource().fetchSource(), equalTo(false));

-        request.fromXContent(
-                XContentFactory.jsonBuilder().startObject().field("_source", "path.inner.*").endObject()
-        );
+        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
+            .startObject()
+                .field("_source", "path.inner.*")
+            .endObject().bytes()));
         assertThat(request.fetchSource(), notNullValue());
         assertThat(request.fetchSource().fetchSource(), equalTo(true));
         assertThat(request.fetchSource().includes().length, equalTo(1));
         assertThat(request.fetchSource().excludes().length, equalTo(0));
         assertThat(request.fetchSource().includes()[0], equalTo("path.inner.*"));

-        request.fromXContent(
-                XContentFactory.jsonBuilder().startObject()
+        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
+            .startObject()
                 .startObject("_source")
                     .field("includes", "path.inner.*")
                     .field("excludes", "another.inner.*")
                 .endObject()
-                .endObject()
-        );
+            .endObject().bytes()));
         assertThat(request.fetchSource(), notNullValue());
         assertThat(request.fetchSource().fetchSource(), equalTo(true));
         assertThat(request.fetchSource().includes().length, equalTo(1));
@@ -254,13 +258,17 @@ public class UpdateRequestTests extends ESTestCase {
         Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
         scripts.put("ctx._source.update_timestamp = ctx._now",
                 (vars) -> {
-                    Map<String, Object> ctx = (Map) vars.get("ctx");
-                    Map<String, Object> source = (Map) ctx.get("_source");
+                    Map<String, Object> vars2 = vars;
+                    @SuppressWarnings("unchecked")
+                    Map<String, Object> ctx = (Map<String, Object>) vars2.get("ctx");
+                    @SuppressWarnings("unchecked")
+                    Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
                     source.put("update_timestamp", ctx.get("_now"));
                     return null;});
         scripts.put("ctx._timestamp = ctx._now",
                 (vars) -> {
-                    Map<String, Object> ctx = (Map) vars.get("ctx");
+                    @SuppressWarnings("unchecked")
+                    Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx");
                     ctx.put("_timestamp", ctx.get("_now"));
                     return null;});
         ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
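Taken together, these hunks move `UpdateRequest.fromXContent` from accepting raw bytes to accepting an `XContentParser`, so the caller now owns parser creation. A minimal sketch of the resulting call pattern, using only the overloads visible in the hunks above (`XContentHelper.createParser(BytesReference)` and `fromXContent(XContentParser)`); an illustration, not the canonical API:

    // Sketch: build the parser at the call site and hand it to the request.
    UpdateRequest request = new UpdateRequest("test", "type", "1");
    request.fromXContent(XContentHelper.createParser(
            new BytesArray("{\"doc\": {\"field1\": \"value1\"}}")));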
View File
@@ -268,7 +268,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
     public void testDynamicUpdateMinimumMasterNodes() throws Exception {
         Settings settings = Settings.builder()
             .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms")
-            .put("discovery.initial_state_timeout", "500ms")
+            .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), "1")
             .build();

         logger.info("--> start first node and wait for it to be a master");
View File
@@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -268,7 +269,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         final byte[] randomBytes = randomBytes();
         BytesReference bytes = builder().startObject().field("binary", randomBytes).endObject().bytes();
-        XContentParser parser = xcontentType().xContent().createParser(bytes);
+        XContentParser parser = createParser(xcontentType().xContent(), bytes);
         assertSame(parser.nextToken(), Token.START_OBJECT);
         assertSame(parser.nextToken(), Token.FIELD_NAME);
         assertEquals(parser.currentName(), "binary");
@@ -284,7 +285,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         final byte[] randomBytes = randomBytes();
         BytesReference bytes = builder().startObject().field("binary").value(randomBytes).endObject().bytes();
-        XContentParser parser = xcontentType().xContent().createParser(bytes);
+        XContentParser parser = createParser(xcontentType().xContent(), bytes);
         assertSame(parser.nextToken(), Token.START_OBJECT);
         assertSame(parser.nextToken(), Token.FIELD_NAME);
         assertEquals(parser.currentName(), "binary");
@@ -309,7 +310,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         }
         builder.endObject();
-        XContentParser parser = xcontentType().xContent().createParser(builder.bytes());
+        XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
         assertSame(parser.nextToken(), Token.START_OBJECT);
         assertSame(parser.nextToken(), Token.FIELD_NAME);
         assertEquals(parser.currentName(), "bin");
@@ -331,7 +332,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         }
         builder.endObject();
-        XContentParser parser = xcontentType().xContent().createParser(builder.bytes());
+        XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
         assertSame(parser.nextToken(), Token.START_OBJECT);
         assertSame(parser.nextToken(), Token.FIELD_NAME);
         assertEquals(parser.currentName(), "utf8");
@@ -349,7 +350,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         final BytesReference random = new BytesArray(randomBytes());
         XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject();
-        XContentParser parser = xcontentType().xContent().createParser(builder.bytes());
+        XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
         assertSame(parser.nextToken(), Token.START_OBJECT);
         assertSame(parser.nextToken(), Token.FIELD_NAME);
         assertEquals(parser.currentName(), "text");
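These hunks stop calling `xContent().createParser(bytes)` directly and route parser creation through a helper on the test base class. A plausible shape for that helper, sketched under the assumption that its point is to centralize parser construction for shared bookkeeping; the real `ESTestCase` method may do more:

    // Hypothetical sketch of the test-base helper the hunks now call.
    protected XContentParser createParser(XContent xContent, BytesReference data) throws IOException {
        // A single choke point lets the test framework wrap or track parsers later.
        return xContent.createParser(data);
    }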
View File
@@ -0,0 +1,81 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.xcontent;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class XContentParserUtilsTests extends ESTestCase {
private XContentType xContentType;
@Before
public void setUp() throws Exception {
super.setUp();
xContentType = randomFrom(XContentType.values());
}
public void testEnsureFieldName() throws IOException {
ParsingException e = expectThrows(ParsingException.class, () -> {
XContentParser parser = createParser(createBuilder().startObject().endObject().bytes());
// Parser current token is null
assertNull(parser.currentToken());
XContentParserUtils.ensureFieldName(parser.currentToken(), parser::getTokenLocation);
});
assertThat(e.getMessage(), equalTo("Failed to parse object: expecting token of type [FIELD_NAME] but found [null]"));
e = expectThrows(ParsingException.class, () -> {
XContentParser parser = createParser(createBuilder().startObject().field("foo", "bar").endObject().bytes());
// Parser next token is a start object
XContentParserUtils.ensureFieldName(parser.nextToken(), parser::getTokenLocation);
});
assertThat(e.getMessage(), equalTo("Failed to parse object: expecting token of type [FIELD_NAME] but found [START_OBJECT]"));
e = expectThrows(ParsingException.class, () -> {
XContentParser parser = createParser(createBuilder().startObject().field("foo", "bar").endObject().bytes());
// Moves to start object
assertThat(parser.nextToken(), is(XContentParser.Token.START_OBJECT));
// Expected field name is "foo", not "test"
XContentParserUtils.ensureFieldName(parser, parser.nextToken(), "test");
});
assertThat(e.getMessage(), equalTo("Failed to parse object: expecting field with name [test] but found [foo]"));
// Everything is fine
final String randomFieldName = randomAsciiOfLength(5);
XContentParser parser = createParser(createBuilder().startObject().field(randomFieldName, 0).endObject().bytes());
assertThat(parser.nextToken(), is(XContentParser.Token.START_OBJECT));
XContentParserUtils.ensureFieldName(parser, parser.nextToken(), randomFieldName);
}
private XContentBuilder createBuilder() throws IOException {
return XContentBuilder.builder(xContentType.xContent());
}
private XContentParser createParser(BytesReference bytes) throws IOException {
return xContentType.xContent().createParser(bytes);
}
}
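The tests above pin down the failure messages of `ensureFieldName`; for orientation, here is a minimal sketch of how such a guard is typically used in a `fromXContent`-style parser (hypothetical caller, not part of the commit; assumes the parser is positioned on `START_OBJECT`):

    // Hypothetical: parse {"<name>": <value>} while asserting parser position.
    void parseSingleField(XContentParser parser, String expectedName) throws IOException {
        XContentParserUtils.ensureFieldName(parser, parser.nextToken(), expectedName);
        parser.nextToken(); // advance from the field name to its value
        // ... consume the value here ...
    }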
View File
@@ -626,12 +626,15 @@ public class NodeJoinControllerTests extends ESTestCase {
         setState(clusterService, stateBuilder.build());
-        final DiscoveryNode restartedNode = new DiscoveryNode(otherNode.getId(),
-            randomBoolean() ? otherNode.getAddress() : buildNewFakeTransportAddress(), otherNode.getAttributes(),
-            otherNode.getRoles(), Version.CURRENT);
+        // conflict on node id or address
+        final DiscoveryNode conflictingNode = randomBoolean() ?
+            new DiscoveryNode(otherNode.getId(), randomBoolean() ? otherNode.getAddress() : buildNewFakeTransportAddress(),
+                otherNode.getAttributes(), otherNode.getRoles(), Version.CURRENT) :
+            new DiscoveryNode("conflicting_address_node", otherNode.getAddress(), otherNode.getAttributes(), otherNode.getRoles(),
+                Version.CURRENT);
         nodeJoinController.startElectionContext();
-        final SimpleFuture joinFuture = joinNodeAsync(restartedNode);
+        final SimpleFuture joinFuture = joinNodeAsync(conflictingNode);
         final CountDownLatch elected = new CountDownLatch(1);
         nodeJoinController.waitToBeElectedAsMaster(1, TimeValue.timeValueHours(5), new NodeJoinController.ElectionCallback() {
             @Override
@@ -655,9 +658,9 @@ public class NodeJoinControllerTests extends ESTestCase {
         assertTrue(finalNodes.isLocalNodeElectedMaster());
         assertThat(finalNodes.getLocalNode(), equalTo(masterNode));
         assertThat(finalNodes.getSize(), equalTo(2));
-        assertThat(finalNodes.get(restartedNode.getId()), equalTo(restartedNode));
+        assertThat(finalNodes.get(conflictingNode.getId()), equalTo(conflictingNode));
         List<ShardRouting> activeShardsOnRestartedNode =
-            StreamSupport.stream(finalState.getRoutingNodes().node(restartedNode.getId()).spliterator(), false)
+            StreamSupport.stream(finalState.getRoutingNodes().node(conflictingNode.getId()).spliterator(), false)
                 .filter(ShardRouting::active).collect(Collectors.toList());
         assertThat(activeShardsOnRestartedNode, empty());
     }
View File
@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.block.ClusterBlock;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.discovery.zen.ElectMasterService;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -49,20 +50,22 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
         return blocks;
     }

-    public Client startNode(Settings.Builder settings) {
-        String name = internalCluster().startNode(settings);
+    public Client startNode(Settings.Builder settings, int minMasterNodes) {
+        String name = internalCluster().startNode(
+            Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes)
+                .put(settings.build()));
         return internalCluster().client(name);
     }

     public void testRecoverAfterNodes() throws Exception {
         logger.info("--> start node (1)");
-        Client clientNode1 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3));
+        Client clientNode1 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3), 1);
         assertThat(clientNode1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start node (2)");
-        Client clientNode2 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3));
+        Client clientNode2 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3), 1);
         Thread.sleep(BLOCK_WAIT_TIMEOUT.millis());
         assertThat(clientNode1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
@@ -72,7 +75,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start node (3)");
-        Client clientNode3 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3));
+        Client clientNode3 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3), 1);

         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, clientNode1).isEmpty(), equalTo(true));
         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, clientNode2).isEmpty(), equalTo(true));
@@ -81,13 +84,17 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
     public void testRecoverAfterMasterNodes() throws Exception {
         logger.info("--> start master_node (1)");
-        Client master1 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
+        Client master1 = startNode(Settings.builder()
+            .put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false)
+            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
         assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start data_node (1)");
-        Client data1 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
+        Client data1 = startNode(Settings.builder()
+            .put("gateway.recover_after_master_nodes", 2)
+            .put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
         assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -96,7 +103,9 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start data_node (2)");
-        Client data2 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
+        Client data2 = startNode(Settings.builder()
+            .put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true)
+            .put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
         assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -108,7 +117,10 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start master_node (2)");
-        Client master2 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
+        Client master2 = startNode(Settings.builder()
+            .put("gateway.recover_after_master_nodes", 2)
+            .put(Node.NODE_DATA_SETTING.getKey(), false)
+            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true));
         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true));
         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true));
@@ -117,13 +129,19 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
     public void testRecoverAfterDataNodes() throws Exception {
         logger.info("--> start master_node (1)");
-        Client master1 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
+        Client master1 = startNode(Settings.builder()
+            .put("gateway.recover_after_data_nodes", 2)
+            .put(Node.NODE_DATA_SETTING.getKey(), false)
+            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
         assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start data_node (1)");
-        Client data1 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
+        Client data1 = startNode(Settings.builder()
+            .put("gateway.recover_after_data_nodes", 2)
+            .put(Node.NODE_DATA_SETTING.getKey(), true)
+            .put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
         assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -132,7 +150,10 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start master_node (2)");
-        Client master2 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
+        Client master2 = startNode(Settings.builder()
+            .put("gateway.recover_after_data_nodes", 2)
+            .put(Node.NODE_DATA_SETTING.getKey(), false)
+            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
         assertThat(master2.admin().cluster().prepareState().setLocal(true).execute().actionGet()
                 .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));
@@ -144,7 +165,10 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
             hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

         logger.info("--> start data_node (2)");
-        Client data2 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
+        Client data2 = startNode(Settings.builder()
+            .put("gateway.recover_after_data_nodes", 2)
+            .put(Node.NODE_DATA_SETTING.getKey(), true)
+            .put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true));
         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true));
         assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true));
View File
@@ -72,11 +72,12 @@ public class FieldTypeLookupTests extends ESTestCase {
         MockFieldMapper f = new MockFieldMapper("foo");
         MockFieldMapper f2 = new MockFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
-        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
-        assertSame(f2.fieldType(), lookup2.get("foo"));
+        lookup = lookup.copyAndAddAll("type1", newList(f), true);
+        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), true);
         assertEquals(1, size(lookup2.iterator()));
+        assertSame(f.fieldType(), lookup2.get("foo"));
+        assertEquals(f2.fieldType(), lookup2.get("foo"));
     }

     public void testAddExistingIndexName() {
View File
@@ -19,16 +19,6 @@
 package org.elasticsearch.index.mapper;

-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.concurrent.ExecutionException;
-import java.util.function.Function;
-
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
@@ -39,8 +29,17 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
 import org.elasticsearch.test.ESSingleNodeTestCase;

+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.function.Function;
+
 import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.Matchers.hasToString;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.startsWith;
@@ -169,7 +168,6 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
         assertThat(mapperService.unmappedFieldType("string"), instanceOf(KeywordFieldType.class));
     }
-
     public void testMergeWithMap() throws Throwable {
         IndexService indexService1 = createIndex("index1");
         MapperService mapperService = indexService1.mapperService();
@@ -187,4 +185,34 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
                 () -> mapperService.merge(mappings, false));
         assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: "));
     }
+
+    public void testOtherDocumentMappersOnlyUpdatedWhenChangingFieldType() throws IOException {
+        IndexService indexService = createIndex("test");
+
+        CompressedXContent simpleMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject()
+                .startObject("properties")
+                    .startObject("field")
+                        .field("type", "text")
+                    .endObject()
+                .endObject().endObject().bytes());
+
+        indexService.mapperService().merge("type1", simpleMapping, MergeReason.MAPPING_UPDATE, true);
+        DocumentMapper documentMapper = indexService.mapperService().documentMapper("type1");
+
+        indexService.mapperService().merge("type2", simpleMapping, MergeReason.MAPPING_UPDATE, true);
+        assertSame(indexService.mapperService().documentMapper("type1"), documentMapper);
+
+        CompressedXContent normsDisabledMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject()
+                .startObject("properties")
+                    .startObject("field")
+                        .field("type", "text")
+                        .startObject("norms")
+                            .field("enabled", false)
+                        .endObject()
+                    .endObject()
+                .endObject().endObject().bytes());
+
+        indexService.mapperService().merge("type3", normsDisabledMapping, MergeReason.MAPPING_UPDATE, true);
+        assertNotSame(indexService.mapperService().documentMapper("type1"), documentMapper);
+    }
 }
View File
@@ -28,11 +28,30 @@ import org.elasticsearch.test.ESTestCase;
 import java.io.IOException;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.concurrent.atomic.AtomicReference;

 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonMap;

 public class RestRequestTests extends ESTestCase {
+    public void testContentParser() throws IOException {
+        Exception e = expectThrows(ElasticsearchParseException.class, () ->
+            new ContentRestRequest("", emptyMap()).contentParser());
+        assertEquals("Body required", e.getMessage());
+        e = expectThrows(ElasticsearchParseException.class, () ->
+            new ContentRestRequest("", singletonMap("source", "{}")).contentParser());
+        assertEquals("Body required", e.getMessage());
+        assertEquals(emptyMap(), new ContentRestRequest("{}", emptyMap()).contentParser().map());
+    }
+
+    public void testApplyContentParser() throws IOException {
+        new ContentRestRequest("", emptyMap()).applyContentParser(p -> fail("Shouldn't have been called"));
+        new ContentRestRequest("", singletonMap("source", "{}")).applyContentParser(p -> fail("Shouldn't have been called"));
+        AtomicReference<Object> source = new AtomicReference<>();
+        new ContentRestRequest("{}", emptyMap()).applyContentParser(p -> source.set(p.map()));
+        assertEquals(emptyMap(), source.get());
+    }
+
     public void testContentOrSourceParam() throws IOException {
         assertEquals(BytesArray.EMPTY, new ContentRestRequest("", emptyMap()).contentOrSourceParam());
         assertEquals(new BytesArray("stuff"), new ContentRestRequest("stuff", emptyMap()).contentOrSourceParam());
@@ -47,15 +66,6 @@ public class RestRequestTests extends ESTestCase {
         assertEquals(true, new ContentRestRequest("", singletonMap("source", "stuff")).hasContentOrSourceParam());
     }

-    public void testContentOrSourceParamParserOrNull() throws IOException {
-        new ContentRestRequest("", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertNull(parser));
-        new ContentRestRequest("{}", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertEquals(emptyMap(), parser.map()));
-        new ContentRestRequest("{}", singletonMap("source", "stuff2")).withContentOrSourceParamParserOrNull(parser ->
-            assertEquals(emptyMap(), parser.map()));
-        new ContentRestRequest("", singletonMap("source", "{}")).withContentOrSourceParamParserOrNull(parser ->
-            assertEquals(emptyMap(), parser.map()));
-    }
-
     public void testContentOrSourceParamParser() throws IOException {
         Exception e = expectThrows(ElasticsearchParseException.class, () ->
             new ContentRestRequest("", emptyMap()).contentOrSourceParamParser());
@@ -65,6 +75,15 @@ public class RestRequestTests extends ESTestCase {
         assertEquals(emptyMap(), new ContentRestRequest("", singletonMap("source", "{}")).contentOrSourceParamParser().map());
     }

+    public void testWithContentOrSourceParamParserOrNull() throws IOException {
+        new ContentRestRequest("", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertNull(parser));
+        new ContentRestRequest("{}", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertEquals(emptyMap(), parser.map()));
+        new ContentRestRequest("{}", singletonMap("source", "stuff2")).withContentOrSourceParamParserOrNull(parser ->
+            assertEquals(emptyMap(), parser.map()));
+        new ContentRestRequest("", singletonMap("source", "{}")).withContentOrSourceParamParserOrNull(parser ->
+            assertEquals(emptyMap(), parser.map()));
+    }
+
     private static final class ContentRestRequest extends RestRequest {
         private final BytesArray content;

         public ContentRestRequest(String content, Map<String, String> params) {
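The new tests encode a contract: `contentParser()` requires a request body (the `source` parameter is not a substitute), and `applyContentParser` only invokes its consumer when a body is present. A hypothetical sketch of that contract; the actual `RestRequest` implementation is not shown in this diff:

    // Hypothetical: only call back when there is a body to parse.
    void applyContentParser(CheckedConsumer<XContentParser, IOException> applyParser) throws IOException {
        if (hasContent()) {
            try (XContentParser parser = contentParser()) {
                applyParser.accept(parser);
            }
        }
    }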
View File
@@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
@@ -76,10 +77,9 @@ public class SnapshotRequestsTests extends ESTestCase {
             builder.endArray();
         }

-        byte[] bytes = BytesReference.toBytes(builder.endObject().bytes());
-
-        request.source(bytes);
+        BytesReference bytes = builder.endObject().bytes();
+        request.source(XContentHelper.createParser(bytes).mapOrdered());

         assertEquals("test-repo", request.repository());
         assertEquals("test-snap", request.snapshot());
@@ -135,10 +135,9 @@ public class SnapshotRequestsTests extends ESTestCase {
             builder.endArray();
         }

-        byte[] bytes = BytesReference.toBytes(builder.endObject().bytes());
-
-        request.source(bytes);
+        BytesReference bytes = builder.endObject().bytes();
+        request.source(XContentHelper.createParser(bytes).mapOrdered());

         assertEquals("test-repo", request.repository());
         assertEquals("test-snap", request.snapshot());
View File
@@ -19,7 +19,6 @@
 package org.elasticsearch.threadpool;

 import org.elasticsearch.Version;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
@@ -27,9 +26,7 @@ import org.elasticsearch.common.unit.SizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
@@ -81,11 +78,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
         info.toXContent(builder, ToXContent.EMPTY_PARAMS);
         builder.endObject();

-        BytesReference bytesReference = builder.bytes();
-        Map<String, Object> map;
-        try (XContentParser parser = XContentFactory.xContent(bytesReference).createParser(bytesReference)) {
-            map = parser.map();
-        }
+        Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false).v2();
         assertThat(map, hasKey("foo"));
         map = (Map<String, Object>) map.get("foo");
         assertThat(map, hasKey("queue_size"));
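`XContentHelper.convertToMap` collapses the four lines of manual parser plumbing into one call. Based on the `.v2()` call above, it appears to return a tuple whose second element is the parsed map, the first presumably being the detected content type; this is a reading of the diff, not a definitive API description:

    // One-liner replacing explicit parser creation, parsing and cleanup;
    // 'false' requests an unordered map, .v2() extracts the map from the tuple.
    Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false).v2();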
View File
@@ -6,7 +6,7 @@ See: https://github.com/elastic/docs
 Snippets marked with `// CONSOLE` are automatically annotated with "VIEW IN
 SENSE" in the documentation and are automatically tested by the command
 `gradle :docs:check`. To test just the docs from a single page, use e.g.
-`gradle :docs:check -Dtest.method=*rollover*`.
+`gradle :docs:check -Dtests.method=*rollover*`.

 By default `// CONSOLE` snippet runs as its own isolated
 test. You can manipulate the test execution in the following ways:
View File
@@ -5,8 +5,12 @@ The suggest feature suggests similar looking terms based on a provided
 text by using a suggester. Parts of the suggest feature are still under
 development.

-The suggest request part is either defined alongside the query part in a
-`_search` request or via the REST `_suggest` endpoint.
+The suggest request part is defined alongside the query part in a `_search`
+request.
+
+NOTE: The `_suggest` endpoint has been deprecated in favour of using suggest via
+the `_search` endpoint. In 5.0, the `_search` endpoint has been optimized for
+suggest-only search requests.

 [source,js]
 --------------------------------------------------
@@ -30,25 +34,6 @@ POST twitter/_search
 // CONSOLE
 // TEST[setup:twitter]

-Suggest requests executed against the `_suggest` endpoint should omit
-the surrounding `suggest` element which is only used if the suggest
-request is part of a search.
-
-[source,js]
---------------------------------------------------
-POST _suggest
-{
-  "my-suggestion" : {
-    "text" : "tring out Elasticsearch",
-    "term" : {
-      "field" : "message"
-    }
-  }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[setup:twitter]
-
 Several suggestions can be specified per request. Each suggestion is
 identified with an arbitrary name. In the example below two suggestions
 are requested. Both `my-suggest-1` and `my-suggest-2` suggestions use
@@ -56,18 +41,20 @@ the `term` suggester, but have a different `text`.

 [source,js]
 --------------------------------------------------
-POST _suggest
-{
-  "my-suggest-1" : {
-    "text" : "tring out Elasticsearch",
-    "term" : {
-      "field" : "message"
-    }
-  },
-  "my-suggest-2" : {
-    "text" : "kmichy",
-    "term" : {
-      "field" : "user"
-    }
-  }
-}
+POST _search
+{
+  "suggest": {
+    "my-suggest-1" : {
+      "text" : "tring out Elasticsearch",
+      "term" : {
+        "field" : "message"
+      }
+    },
+    "my-suggest-2" : {
+      "text" : "kmichy",
+      "term" : {
+        "field" : "user"
+      }
+    }
+  }
+}
@@ -85,27 +72,34 @@ in the suggest text and if found an arbitrary number of options.
 --------------------------------------------------
 {
   "_shards": ...
-  "my-suggest-1": [ {
-    "text": "tring",
-    "offset": 0,
-    "length": 5,
-    "options": [ {"text": "trying", "score": 0.8, "freq": 1 } ]
-  }, {
-    "text": "out",
-    "offset": 6,
-    "length": 3,
-    "options": []
-  }, {
-    "text": "elasticsearch",
-    "offset": 10,
-    "length": 13,
-    "options": []
-  } ],
-  "my-suggest-2": ...
+  "hits": ...
+  "took": 2,
+  "timed_out": false,
+  "suggest": {
+    "my-suggest-1": [ {
+      "text": "tring",
+      "offset": 0,
+      "length": 5,
+      "options": [ {"text": "trying", "score": 0.8, "freq": 1 } ]
+    }, {
+      "text": "out",
+      "offset": 6,
+      "length": 3,
+      "options": []
+    }, {
+      "text": "elasticsearch",
+      "offset": 10,
+      "length": 13,
+      "options": []
+    } ],
+    "my-suggest-2": ...
+  }
 }
 --------------------------------------------------
 // TESTRESPONSE[s/"_shards": \.\.\./"_shards": "$body._shards",/]
-// TESTRESPONSE[s/"my-suggest-2": \.\.\./"my-suggest-2": "$body.my-suggest-2"/]
+// TESTRESPONSE[s/"hits": .../"hits": "$body.hits",/]
+// TESTRESPONSE[s/"took": 2,/"took": "$body.took",/]
+// TESTRESPONSE[s/"my-suggest-2": \.\.\./"my-suggest-2": "$body.suggest.my-suggest-2"/]

 Each options array contains an option object that includes the
@@ -123,17 +117,19 @@ and applies to the `my-suggest-1` and `my-suggest-2` suggestions.

 [source,js]
 --------------------------------------------------
-POST _suggest
-{
-  "text" : "tring out Elasticsearch",
-  "my-suggest-1" : {
-    "term" : {
-      "field" : "message"
-    }
-  },
-  "my-suggest-2" : {
-    "term" : {
-      "field" : "user"
-    }
-  }
-}
+POST _search
+{
+  "suggest": {
+    "text" : "tring out Elasticsearch",
+    "my-suggest-1" : {
+      "term" : {
+        "field" : "message"
+      }
+    },
+    "my-suggest-2" : {
+      "term" : {
+        "field" : "user"
+      }
+    }
+  }
+}
View File
@@ -152,12 +152,14 @@ documents once deleted are never shown. This request:

 [source,js]
 --------------------------------------------------
-POST music/_suggest?pretty
-{
-  "song-suggest" : {
-    "prefix" : "nir",
-    "completion" : {
-      "field" : "suggest"
-    }
-  }
-}
+POST music/_search?pretty
+{
+  "suggest": {
+    "song-suggest" : {
+      "prefix" : "nir",
+      "completion" : {
+        "field" : "suggest"
+      }
+    }
+  }
+}
@@ -175,24 +177,30 @@ returns this response:
     "successful" : 5,
     "failed" : 0
   },
-  "song-suggest" : [ {
-    "text" : "nir",
-    "offset" : 0,
-    "length" : 3,
-    "options" : [ {
-      "text" : "Nirvana",
-      "_index": "music",
-      "_type": "song",
-      "_id": "1",
-      "_score": 1.0,
-      "_source": {
-        "suggest": ["Nevermind", "Nirvana"]
-      }
-    } ]
-  } ]
+  "hits": ...
+  "took": 2,
+  "timed_out": false,
+  "suggest": {
+    "song-suggest" : [ {
+      "text" : "nir",
+      "offset" : 0,
+      "length" : 3,
+      "options" : [ {
+        "text" : "Nirvana",
+        "_index": "music",
+        "_type": "song",
+        "_id": "1",
+        "_score": 1.0,
+        "_source": {
+          "suggest": ["Nevermind", "Nirvana"]
+        }
+      } ]
+    } ]
+  }
 }
 --------------------------------------------------
-// TESTRESPONSE
+// TESTRESPONSE[s/"hits": .../"hits": "$body.hits",/]
+// TESTRESPONSE[s/"took": 2,/"took": "$body.took",/]

 IMPORTANT: `_source` meta-field must be enabled, which is the default
@@ -289,14 +297,16 @@ you can have a typo in your search and still get results back.

 [source,js]
 --------------------------------------------------
-POST music/_suggest?pretty
-{
-  "song-suggest" : {
-    "prefix" : "nor",
-    "completion" : {
-      "field" : "suggest",
-      "fuzzy" : {
-        "fuzziness" : 2
-      }
-    }
-  }
-}
+POST music/_search?pretty
+{
+  "suggest": {
+    "song-suggest" : {
+      "prefix" : "nor",
+      "completion" : {
+        "field" : "suggest",
+        "fuzzy" : {
+          "fuzziness" : 2
+        }
+      }
+    }
+  }
+}
@@ -346,12 +356,14 @@ you can express a prefix as a regular expression

 [source,js]
 --------------------------------------------------
-POST music/_suggest?pretty
-{
-  "song-suggest" : {
-    "regex" : "n[ever|i]r",
-    "completion" : {
-      "field" : "suggest"
-    }
-  }
-}
+POST music/_search?pretty
+{
+  "suggest": {
+    "song-suggest" : {
+      "regex" : "n[ever|i]r",
+      "completion" : {
+        "field" : "suggest"
+      }
+    }
+  }
+}
View File
@@ -138,15 +138,17 @@ filters suggestions by multiple categories:

 [source,js]
 --------------------------------------------------
-POST place/_suggest?pretty
-{
-  "suggest" : {
-    "prefix" : "tim",
-    "completion" : {
-      "field" : "suggest",
-      "size": 10,
-      "contexts": {
-        "place_type": [ "cafe", "restaurants" ]
-      }
-    }
-  }
-}
+POST place/_search?pretty
+{
+  "suggest": {
+    "place_suggestion" : {
+      "prefix" : "tim",
+      "completion" : {
+        "field" : "suggest",
+        "size": 10,
+        "contexts": {
+          "place_type": [ "cafe", "restaurants" ]
+        }
+      }
+    }
+  }
+}
@@ -165,18 +167,20 @@ suggestions associated with some categories:

 [source,js]
 --------------------------------------------------
-POST place/_suggest?pretty
-{
-  "suggest" : {
-    "prefix" : "tim",
-    "completion" : {
-      "field" : "suggest",
-      "size": 10,
-      "contexts": {
-        "place_type": [ <1>
-          { "context" : "cafe" },
-          { "context" : "restaurants", "boost": 2 }
-        ]
-      }
-    }
-  }
-}
+POST place/_search?pretty
+{
+  "suggest": {
+    "place_suggestion" : {
+      "prefix" : "tim",
+      "completion" : {
+        "field" : "suggest",
+        "size": 10,
+        "contexts": {
+          "place_type": [ <1>
+            { "context" : "cafe" },
+            { "context" : "restaurants", "boost": 2 }
+          ]
+        }
+      }
+    }
+  }
+}
@@ -275,17 +279,19 @@ the encoded geohash of a geo point:

 [source,js]
 --------------------------------------------------
-POST place/_suggest
-{
-  "suggest" : {
-    "prefix" : "tim",
-    "completion" : {
-      "field" : "suggest",
-      "size": 10,
-      "contexts": {
-        "location": {
-          "lat": 43.662,
-          "lon": -79.380
-        }
-      }
-    }
-  }
-}
+POST place/_search
+{
+  "suggest": {
+    "place_suggestion" : {
+      "prefix" : "tim",
+      "completion" : {
+        "field" : "suggest",
+        "size": 10,
+        "contexts": {
+          "location": {
+            "lat": 43.662,
+            "lon": -79.380
+          }
+        }
+      }
+    }
+  }
+}
@@ -303,28 +309,30 @@ than others, as shown by the following:

 [source,js]
 --------------------------------------------------
-POST place/_suggest?pretty
-{
-  "suggest" : {
-    "prefix" : "tim",
-    "completion" : {
-      "field" : "suggest",
-      "size": 10,
-      "contexts": {
-        "location": [ <1>
-          {
-            "lat": 43.6624803,
-            "lon": -79.3863353,
-            "precision": 2
-          },
-          {
-            "context": {
-              "lat": 43.6624803,
-              "lon": -79.3863353
-            },
-            "boost": 2
-          }
-        ]
-      }
-    }
-  }
-}
+POST place/_search?pretty
+{
+  "suggest": {
+    "place_suggestion" : {
+      "prefix" : "tim",
+      "completion" : {
+        "field" : "suggest",
+        "size": 10,
+        "contexts": {
+          "location": [ <1>
+            {
+              "lat": 43.6624803,
+              "lon": -79.3863353,
+              "precision": 2
+            },
+            {
+              "context": {
+                "lat": 43.6624803,
+                "lon": -79.3863353
+              },
+              "boost": 2
+            }
+          ]
+        }
+      }
+    }
+  }
+}
View File
@@ -84,21 +84,23 @@ suggester in the same spot you'd use the `term` suggester:

 [source,js]
 --------------------------------------------------
-POST _suggest
-{
-  "text": "noble prize",
-  "simple_phrase": {
-    "phrase": {
-      "field": "title.trigram",
-      "size": 1,
-      "gram_size": 3,
-      "direct_generator": [ {
-        "field": "title.trigram",
-        "suggest_mode": "always"
-      } ],
-      "highlight": {
-        "pre_tag": "<em>",
-        "post_tag": "</em>"
-      }
-    }
-  }
-}
+POST test/_search
+{
+  "suggest": {
+    "text": "noble prize",
+    "simple_phrase": {
+      "phrase": {
+        "field": "title.trigram",
+        "size": 1,
+        "gram_size": 3,
+        "direct_generator": [ {
+          "field": "title.trigram",
+          "suggest_mode": "always"
+        } ],
+        "highlight": {
+          "pre_tag": "<em>",
+          "post_tag": "</em>"
+        }
+      }
+    }
+  }
+}
@@ -112,21 +114,28 @@ The response contains suggestions scored by the most likely spell correction first.
 --------------------------------------------------
 {
   "_shards": ...
-  "simple_phrase" : [
-    {
-      "text" : "noble prize",
-      "offset" : 0,
-      "length" : 11,
-      "options" : [ {
-        "text" : "nobel prize",
-        "highlighted": "<em>nobel</em> prize",
-        "score" : 0.5962314
-      }]
-    }
-  ]
+  "hits": ...
+  "timed_out": false,
+  "took": 3,
+  "suggest": {
+    "simple_phrase" : [
+      {
+        "text" : "noble prize",
+        "offset" : 0,
+        "length" : 11,
+        "options" : [ {
+          "text" : "nobel prize",
+          "highlighted": "<em>nobel</em> prize",
+          "score" : 0.5962314
+        }]
+      }
+    ]
+  }
 }
 --------------------------------------------------
 // TESTRESPONSE[s/"_shards": .../"_shards": "$body._shards",/]
+// TESTRESPONSE[s/"hits": .../"hits": "$body.hits",/]
+// TESTRESPONSE[s/"took": 3,/"took": "$body.took",/]

 ==== Basic Phrase suggest API parameters
@@ -217,28 +226,30 @@ The response contains suggestions scored by the most likely spell correction first.

 [source,js]
 --------------------------------------------------
-POST _suggest
-{
-  "text" : "noble prize",
-  "simple_phrase" : {
-    "phrase" : {
-      "field" : "title.trigram",
-      "size" : 1,
-      "direct_generator" : [ {
-        "field" : "title.trigram",
-        "suggest_mode" : "always",
-        "min_word_length" : 1
-      } ],
-      "collate": {
-        "query": { <1>
-          "inline" : {
-            "match": {
-              "{{field_name}}" : "{{suggestion}}" <2>
-            }
-          }
-        },
-        "params": {"field_name" : "title"}, <3>
-        "prune": true <4>
-      }
-    }
-  }
-}
+POST _search
+{
+  "suggest": {
+    "text" : "noble prize",
+    "simple_phrase" : {
+      "phrase" : {
+        "field" : "title.trigram",
+        "size" : 1,
+        "direct_generator" : [ {
+          "field" : "title.trigram",
+          "suggest_mode" : "always",
+          "min_word_length" : 1
+        } ],
+        "collate": {
+          "query": { <1>
+            "inline" : {
+              "match": {
+                "{{field_name}}" : "{{suggestion}}" <2>
+              }
+            }
+          },
+          "params": {"field_name" : "title"}, <3>
+          "prune": true <4>
+        }
+      }
+    }
+  }
+}
@@ -381,22 +392,24 @@ accept ordinary analyzer names.

 [source,js]
 --------------------------------------------------
-POST _suggest
-{
-  "text" : "obel prize",
-  "simple_phrase" : {
-    "phrase" : {
-      "field" : "title.trigram",
-      "size" : 1,
-      "direct_generator" : [ {
-        "field" : "title.trigram",
-        "suggest_mode" : "always"
-      }, {
-        "field" : "title.reverse",
-        "suggest_mode" : "always",
-        "pre_filter" : "reverse",
-        "post_filter" : "reverse"
-      } ]
-    }
-  }
-}
+POST _search
+{
+  "suggest": {
+    "text" : "obel prize",
+    "simple_phrase" : {
+      "phrase" : {
+        "field" : "title.trigram",
+        "size" : 1,
+        "direct_generator" : [ {
+          "field" : "title.trigram",
+          "suggest_mode" : "always"
+        }, {
+          "field" : "title.reverse",
+          "suggest_mode" : "always",
+          "pre_filter" : "reverse",
+          "post_filter" : "reverse"
+        } ]
+      }
+    }
+  }
+}
View File
@ -28,21 +28,21 @@ import org.elasticsearch.ingest.Processor;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.Strings.hasLength; import static org.elasticsearch.common.Strings.hasLength;
import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException;
import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalMap; import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalMap;
import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalStringProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalStringProperty;
import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty;
import static org.elasticsearch.script.ScriptType.FILE; import static org.elasticsearch.script.ScriptType.FILE;
import static org.elasticsearch.script.ScriptType.INLINE; import static org.elasticsearch.script.ScriptType.INLINE;
import static org.elasticsearch.script.ScriptType.STORED; import static org.elasticsearch.script.ScriptType.STORED;
/** /**
* Processor that adds new fields with their corresponding values. If the field is already present, its value * Processor that evaluates a script with an ingest document in its context.
* will be replaced with the provided one.
*/ */
public final class ScriptProcessor extends AbstractProcessor { public final class ScriptProcessor extends AbstractProcessor {
@ -51,12 +51,24 @@ public final class ScriptProcessor extends AbstractProcessor {
private final Script script; private final Script script;
private final ScriptService scriptService; private final ScriptService scriptService;
/**
* Processor that evaluates a script with an ingest document in its context
*
* @param tag The processor's tag.
* @param script The {@link Script} to execute.
* @param scriptService The {@link ScriptService} used to execute the script.
*/
ScriptProcessor(String tag, Script script, ScriptService scriptService) { ScriptProcessor(String tag, Script script, ScriptService scriptService) {
super(tag); super(tag);
this.script = script; this.script = script;
this.scriptService = scriptService; this.scriptService = scriptService;
} }
/**
* Executes the script with the Ingest document in context.
*
* @param document The Ingest document passed into the script context under the "ctx" object.
*/
@Override @Override
public void execute(IngestDocument document) { public void execute(IngestDocument document) {
ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.INGEST); ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.INGEST);
@ -111,16 +123,27 @@ public final class ScriptProcessor extends AbstractProcessor {
} }
final Script script; final Script script;
String scriptPropertyUsed;
if (Strings.hasLength(file)) { if (Strings.hasLength(file)) {
script = new Script(FILE, lang, file, (Map<String, Object>)params); script = new Script(FILE, lang, file, (Map<String, Object>)params);
scriptPropertyUsed = "file";
} else if (Strings.hasLength(inline)) { } else if (Strings.hasLength(inline)) {
script = new Script(INLINE, lang, inline, (Map<String, Object>)params); script = new Script(INLINE, lang, inline, (Map<String, Object>)params);
scriptPropertyUsed = "inline";
} else if (Strings.hasLength(id)) { } else if (Strings.hasLength(id)) {
script = new Script(STORED, lang, id, (Map<String, Object>)params); script = new Script(STORED, lang, id, (Map<String, Object>)params);
scriptPropertyUsed = "id";
} else { } else {
throw newConfigurationException(TYPE, processorTag, null, "Could not initialize script"); throw newConfigurationException(TYPE, processorTag, null, "Could not initialize script");
} }
// verify script is able to be compiled before successfully creating processor.
try {
scriptService.compile(script, ScriptContext.Standard.INGEST, script.getOptions());
} catch (ScriptException e) {
throw newConfigurationException(TYPE, processorTag, scriptPropertyUsed, e);
}
return new ScriptProcessor(processorTag, script, scriptService); return new ScriptProcessor(processorTag, script, scriptService);
} }
} }
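The hunk above is a fail-fast pattern: the factory compiles the script while the pipeline is being created, so a syntax error rejects the configuration immediately instead of surfacing when the first document hits the processor. A self-contained toy sketch of the same idea (all names here are stand-ins, not Elasticsearch classes):

// Compile at creation time; a bad script aborts create() rather than
// failing later inside execute().
interface Compiler {
    void compile(String source);
}

public final class FailFastFactory {
    private final Compiler compiler;

    FailFastFactory(Compiler compiler) {
        this.compiler = compiler;
    }

    Runnable create(String tag, String source) {
        compiler.compile(source); // surfaces syntax errors now, not at execute time
        return () -> System.out.println("processor [" + tag + "] runs: " + source);
    }

    public static void main(String[] args) {
        Compiler toy = src -> {
            if (src.contains("!")) {
                throw new IllegalArgumentException("compile error near '!'");
            }
        };
        FailFastFactory factory = new FailFastFactory(toy);
        factory.create("ok", "ctx.field = 1").run();
        try {
            factory.create("bad", "invalid painless, hear me roar!");
        } catch (IllegalArgumentException e) {
            System.out.println("rejected at creation: " + e.getMessage());
        }
    }
}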
View File
@ -21,6 +21,7 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.Before; import org.junit.Before;
@ -31,7 +32,9 @@ import java.util.Map;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ScriptProcessorFactoryTests extends ESTestCase { public class ScriptProcessorFactoryTests extends ESTestCase {
@ -98,4 +101,22 @@ public class ScriptProcessorFactoryTests extends ESTestCase {
assertThat(exception.getMessage(), is("Need [file], [id], or [inline] parameter to refer to scripts")); assertThat(exception.getMessage(), is("Need [file], [id], or [inline] parameter to refer to scripts"));
} }
public void testFactoryInvalidateWithInvalidCompiledScript() throws Exception {
String randomType = randomFrom("inline", "file", "id");
ScriptService mockedScriptService = mock(ScriptService.class);
ScriptException thrownException = new ScriptException("compile-time exception", new RuntimeException(),
Collections.emptyList(), "script", "mockscript");
when(mockedScriptService.compile(any(), any(), any())).thenThrow(thrownException);
factory = new ScriptProcessor.Factory(mockedScriptService);
Map<String, Object> configMap = new HashMap<>();
configMap.put("lang", "mockscript");
configMap.put(randomType, "my_script");
ElasticsearchException exception = expectThrows(ElasticsearchException.class,
() -> factory.create(null, randomAsciiOfLength(10), configMap));
assertThat(exception.getMessage(), is("compile-time exception"));
}
} }
View File
@ -24,12 +24,10 @@ import java.util.Map;
import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.mockito.stubbing.Answer;
import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.core.Is.is; import static org.hamcrest.core.Is.is;
View File
@ -117,16 +117,13 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
@Override @Override
protected ReindexRequest buildRequest(RestRequest request) throws IOException { protected ReindexRequest buildRequest(RestRequest request) throws IOException {
if (false == request.hasContent()) {
throw new IllegalArgumentException("_reindex requires a request body");
}
if (request.hasParam("pipeline")) { if (request.hasParam("pipeline")) {
throw new IllegalArgumentException("_reindex doesn't support [pipeline] as a query parmaeter. " throw new IllegalArgumentException("_reindex doesn't support [pipeline] as a query parmaeter. "
+ "Specify it in the [dest] object instead."); + "Specify it in the [dest] object instead.");
} }
ReindexRequest internal = new ReindexRequest(new SearchRequest(), new IndexRequest()); ReindexRequest internal = new ReindexRequest(new SearchRequest(), new IndexRequest());
try (XContentParser xcontent = XContentFactory.xContent(request.content()).createParser(request.content())) { try (XContentParser parser = request.contentParser()) {
PARSER.parse(xcontent, internal, new ReindexParseContext(searchRequestParsers, parseFieldMatcher)); PARSER.parse(parser, internal, new ReindexParseContext(searchRequestParsers, parseFieldMatcher));
} }
return internal; return internal;
} }
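The parsing change is small but meaningful: instead of building a parser by hand from `request.content()`, the handler asks the request for its parser, so concerns like a missing body live in one place. A toy illustration of that ownership shift (plain JDK types, not the Elasticsearch API):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

// One accessor owns the "is there a body at all?" check; callers just
// consume the parser in try-with-resources so it is always closed.
public class ParserOwnership {
    static BufferedReader contentParser(String body) {
        if (body == null || body.isEmpty()) {
            throw new IllegalArgumentException("request requires a body");
        }
        return new BufferedReader(new StringReader(body));
    }

    public static void main(String[] args) throws IOException {
        try (BufferedReader parser = contentParser("{\"source\":{},\"dest\":{}}")) {
            System.out.println("parsed: " + parser.readLine());
        }
    }
}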
View File
@ -385,7 +385,6 @@ public class Netty4Transport extends TcpTransport<Channel> {
} }
throw e; throw e;
} }
onAfterChannelsConnected(nodeChannels);
success = true; success = true;
} finally { } finally {
if (success == false) { if (success == false) {
@ -399,14 +398,6 @@ public class Netty4Transport extends TcpTransport<Channel> {
return nodeChannels; return nodeChannels;
} }
/**
* Allows for logic to be executed after a connection has been made on all channels. While this method is being executed, the node is
* not listed as being connected to.
* @param nodeChannels the {@link NodeChannels} that have been connected
*/
protected void onAfterChannelsConnected(NodeChannels nodeChannels) {
}
private class ChannelCloseListener implements ChannelFutureListener { private class ChannelCloseListener implements ChannelFutureListener {
private final DiscoveryNode node; private final DiscoveryNode node;
@ -417,6 +408,7 @@ public class Netty4Transport extends TcpTransport<Channel> {
@Override @Override
public void operationComplete(final ChannelFuture future) throws Exception { public void operationComplete(final ChannelFuture future) throws Exception {
onChannelClosed(future.channel());
NodeChannels nodeChannels = connectedNodes.get(node); NodeChannels nodeChannels = connectedNodes.get(node);
if (nodeChannels != null && nodeChannels.hasChannel(future.channel())) { if (nodeChannels != null && nodeChannels.hasChannel(future.channel())) {
threadPool.generic().execute(() -> disconnectFromNode(node, future.channel(), "channel closed event")); threadPool.generic().execute(() -> disconnectFromNode(node, future.channel(), "channel closed event"));
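With `onAfterChannelsConnected` gone, cleanup is driven purely by the channel's own close event via `onChannelClosed`. A toy model of that listener pattern (illustrative names, not the Netty or Elasticsearch API):

import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;

// Interested parties register a callback that fires exactly when the channel
// reports itself closed.
public class CloseListenerDemo {
    static class Channel {
        private final CopyOnWriteArrayList<Consumer<Channel>> closeListeners = new CopyOnWriteArrayList<>();

        void addCloseListener(Consumer<Channel> listener) {
            closeListeners.add(listener);
        }

        void close() {
            closeListeners.forEach(listener -> listener.accept(this)); // the onChannelClosed moment
        }
    }

    public static void main(String[] args) {
        Channel channel = new Channel();
        channel.addCloseListener(c -> System.out.println("channel closed, cleaning up"));
        channel.close();
    }
}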
View File
@ -23,7 +23,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.BytesRestResponse;
@ -83,7 +82,7 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler {
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
final List<String> settings; final List<String> settings;
try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) { try (XContentParser parser = request.contentParser()) {
final Map<String, Object> source = parser.map(); final Map<String, Object> source = parser.map();
if (source.containsKey("deprecated_settings")) { if (source.containsKey("deprecated_settings")) {
View File
@ -115,3 +115,26 @@
- match: { _source.bytes_in: 1234 } - match: { _source.bytes_in: 1234 }
- match: { _source.bytes_out: 4321 } - match: { _source.bytes_out: 4321 }
- match: { _source.bytes_total: 5555 } - match: { _source.bytes_total: 5555 }
---
"Test script processor with syntax error in inline script":
- do:
catch: request
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"script" : {
"inline": "invalid painless, hear me roar!"
}
}
]
}
- match: { error.header.processor_type: "script" }
- match: { error.header.property_name: "inline" }
- match: { error.type: "script_exception" }
- match: { error.reason: "compile error" }
View File
@ -1,7 +1,7 @@
{ {
"suggest": { "suggest": {
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-suggesters.html", "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-suggesters.html",
"methods": ["POST", "GET"], "methods": ["POST"],
"url": { "url": {
"path": "/_suggest", "path": "/_suggest",
"paths": ["/_suggest", "/{index}/_suggest"], "paths": ["/_suggest", "/{index}/_suggest"],
@ -13,18 +13,18 @@
}, },
"params": { "params": {
"ignore_unavailable": { "ignore_unavailable": {
"type" : "boolean", "type" : "boolean",
"description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
}, },
"allow_no_indices": { "allow_no_indices": {
"type" : "boolean", "type" : "boolean",
"description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
}, },
"expand_wildcards": { "expand_wildcards": {
"type" : "enum", "type" : "enum",
"options" : ["open","closed","none","all"], "options" : ["open","closed","none","all"],
"default" : "open", "default" : "open",
"description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
}, },
"preference": { "preference": {
"type" : "string", "type" : "string",
View File
@ -38,22 +38,24 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
index: test1 index: test1
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: bar.completion completion:
field: bar.completion
- do: - do:
suggest: search:
index: test1 index: test1
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: baz.completion completion:
field: baz.completion
- do: - do:
indices.refresh: {} indices.refresh: {}
@ -291,4 +293,3 @@ setup:
- gt: { _all.total.completion.fields.bar\.completion.size_in_bytes: 0 } - gt: { _all.total.completion.fields.bar\.completion.size_in_bytes: 0 }
- is_false: _all.total.completion.fields.baz\.completion - is_false: _all.total.completion.fields.baz\.completion
- is_false: _all.total.fielddata.fields - is_false: _all.total.fielddata.fields
View File
@ -13,6 +13,24 @@ setup:
"Basic tests for suggest API": "Basic tests for suggest API":
- do: - do:
search:
body:
suggest:
test_suggestion:
text: "The Amsterdma meetpu"
term:
field: body
- match: {suggest.test_suggestion.1.options.0.text: amsterdam}
- match: {suggest.test_suggestion.2.options.0.text: meetup}
---
"Suggest API should have deprecation warning":
- skip:
features: 'warnings'
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest: suggest:
body: body:
test_suggestion: test_suggestion:
View File
@ -0,0 +1,314 @@
# This test creates one huge mapping in the setup
# Every test should use its own field to make sure it works
setup:
- do:
indices.create:
index: test
body:
mappings:
test:
"properties":
"suggest_1":
"type" : "completion"
"suggest_2":
"type" : "completion"
"suggest_3":
"type" : "completion"
"suggest_4":
"type" : "completion"
"suggest_5a":
"type" : "completion"
"suggest_5b":
"type" : "completion"
"suggest_6":
"type" : "completion"
title:
type: keyword
---
"Simple suggestion should work":
- skip:
features: 'warnings'
- do:
index:
index: test
type: test
id: 1
body:
suggest_1: "bar"
- do:
index:
index: test
type: test
id: 2
body:
suggest_1: "baz"
- do:
indices.refresh: {}
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "b"
completion:
field: suggest_1
- length: { result: 1 }
- length: { result.0.options: 2 }
---
"Simple suggestion array should work":
- skip:
features: 'warnings'
- do:
index:
index: test
type: test
id: 1
body:
suggest_2: ["bar", "foo"]
- do:
indices.refresh: {}
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "f"
completion:
field: suggest_2
- length: { result: 1 }
- length: { result.0.options: 1 }
- match: { result.0.options.0.text: "foo" }
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "b"
completion:
field: suggest_2
- length: { result: 1 }
- length: { result.0.options: 1 }
- match: { result.0.options.0.text: "bar" }
---
"Suggestion entry should work":
- skip:
features: 'warnings'
- do:
index:
index: test
type: test
id: 1
body:
suggest_3:
input: "bar"
weight: 2
- do:
index:
index: test
type: test
id: 2
body:
suggest_3:
input: "baz"
weight: 3
- do:
indices.refresh: {}
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "b"
completion:
field: suggest_3
- length: { result: 1 }
- length: { result.0.options: 2 }
- match: { result.0.options.0.text: "baz" }
- match: { result.0.options.1.text: "bar" }
---
"Suggestion entry array should work":
- skip:
features: 'warnings'
- do:
index:
index: test
type: test
id: 1
body:
suggest_4:
- input: "bar"
weight: 3
- input: "fo"
weight: 3
- do:
index:
index: test
type: test
id: 2
body:
suggest_4:
- input: "baz"
weight: 2
- input: "foo"
weight: 1
- do:
indices.refresh: {}
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "b"
completion:
field: suggest_4
- length: { result: 1 }
- length: { result.0.options: 2 }
- match: { result.0.options.0.text: "bar" }
- match: { result.0.options.1.text: "baz" }
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "f"
completion:
field: suggest_4
- length: { result: 1 }
- length: { result.0.options: 2 }
- match: { result.0.options.0.text: "fo" }
- match: { result.0.options.1.text: "foo" }
---
"Multiple Completion fields should work":
- skip:
features: 'warnings'
- do:
index:
index: test
type: test
id: 1
body:
suggest_5a: "bar"
suggest_5b: "baz"
- do:
indices.refresh: {}
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "b"
completion:
field: suggest_5a
- length: { result: 1 }
- length: { result.0.options: 1 }
- match: { result.0.options.0.text: "bar" }
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "b"
completion:
field: suggest_5b
- length: { result: 1 }
- length: { result.0.options: 1 }
- match: { result.0.options.0.text: "baz" }
---
"Suggestions with source should work":
- skip:
features: 'warnings'
- do:
index:
index: test
type: test
id: 1
body:
suggest_6:
input: "bar"
weight: 2
title: "title_bar"
count: 4
- do:
index:
index: test
type: test
id: 2
body:
suggest_6:
input: "baz"
weight: 3
title: "title_baz"
count: 3
- do:
indices.refresh: {}
- do:
warnings:
- "[POST /_suggest] is deprecated! Use [POST /_search] instead."
suggest:
body:
result:
text: "b"
completion:
field: suggest_6
- length: { result: 1 }
- length: { result.0.options: 2 }
- match: { result.0.options.0.text: "baz" }
- match: { result.0.options.0._index: "test" }
- match: { result.0.options.0._type: "test" }
- match: { result.0.options.0._source.title: "title_baz" }
- match: { result.0.options.0._source.count: 3 }
- match: { result.0.options.1.text: "bar" }
- match: { result.0.options.1._index: "test" }
- match: { result.0.options.1._type: "test" }
- match: { result.0.options.1._source.title: "title_bar" }
- match: { result.0.options.1._source.count: 4 }
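One detail the expectations above rely on: completion options come back ordered by descending weight, which is why "baz" (weight 3) is listed before "bar" (weight 2). A toy model of that ordering (the real scoring lives inside the Lucene suggester):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

// Sort suggestions by weight, highest first, mirroring the order the
// completion suggester returns options in.
public class WeightOrdering {
    public static void main(String[] args) {
        List<String[]> options = Arrays.asList(
            new String[] { "bar", "2" },
            new String[] { "baz", "3" });
        options.sort(Comparator.comparingInt((String[] o) -> Integer.parseInt(o[1])).reversed());
        options.forEach(o -> System.out.println(o[0] + " (weight " + o[1] + ")"));
        // prints: baz (weight 3), then bar (weight 2)
    }
}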
View File
@ -50,15 +50,16 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: suggest_1 completion:
field: suggest_1
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 2 } - length: { suggest.result.0.options: 2 }
--- ---
"Simple suggestion array should work": "Simple suggestion array should work":
@ -75,28 +76,30 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "f" result:
completion: text: "f"
field: suggest_2 completion:
field: suggest_2
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "foo" } - match: { suggest.result.0.options.0.text: "foo" }
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: suggest_2 completion:
field: suggest_2
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "bar" } - match: { suggest.result.0.options.0.text: "bar" }
--- ---
"Suggestion entry should work": "Suggestion entry should work":
@ -125,17 +128,18 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: suggest_3 completion:
field: suggest_3
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 2 } - length: { suggest.result.0.options: 2 }
- match: { result.0.options.0.text: "baz" } - match: { suggest.result.0.options.0.text: "baz" }
- match: { result.0.options.1.text: "bar" } - match: { suggest.result.0.options.1.text: "bar" }
--- ---
"Suggestion entry array should work": "Suggestion entry array should work":
@ -168,30 +172,32 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: suggest_4 completion:
field: suggest_4
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 2 } - length: { suggest.result.0.options: 2 }
- match: { result.0.options.0.text: "bar" } - match: { suggest.result.0.options.0.text: "bar" }
- match: { result.0.options.1.text: "baz" } - match: { suggest.result.0.options.1.text: "baz" }
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "f" result:
completion: text: "f"
field: suggest_4 completion:
field: suggest_4
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 2 } - length: { suggest.result.0.options: 2 }
- match: { result.0.options.0.text: "fo" } - match: { suggest.result.0.options.0.text: "fo" }
- match: { result.0.options.1.text: "foo" } - match: { suggest.result.0.options.1.text: "foo" }
--- ---
"Multiple Completion fields should work": "Multiple Completion fields should work":
@ -209,28 +215,30 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: suggest_5a completion:
field: suggest_5a
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "bar" } - match: { suggest.result.0.options.0.text: "bar" }
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: suggest_5b completion:
field: suggest_5b
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "baz" } - match: { suggest.result.0.options.0.text: "baz" }
--- ---
"Suggestions with source should work": "Suggestions with source should work":
@ -263,23 +271,23 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "b" result:
completion: text: "b"
field: suggest_6 completion:
field: suggest_6
- length: { result: 1 }
- length: { result.0.options: 2 }
- match: { result.0.options.0.text: "baz" }
- match: { result.0.options.0._index: "test" }
- match: { result.0.options.0._type: "test" }
- match: { result.0.options.0._source.title: "title_baz" }
- match: { result.0.options.0._source.count: 3 }
- match: { result.0.options.1.text: "bar" }
- match: { result.0.options.1._index: "test" }
- match: { result.0.options.1._type: "test" }
- match: { result.0.options.1._source.title: "title_bar" }
- match: { result.0.options.1._source.count: 4 }
- length: { suggest.result: 1 }
- length: { suggest.result.0.options: 2 }
- match: { suggest.result.0.options.0.text: "baz" }
- match: { suggest.result.0.options.0._index: "test" }
- match: { suggest.result.0.options.0._type: "test" }
- match: { suggest.result.0.options.0._source.title: "title_baz" }
- match: { suggest.result.0.options.0._source.count: 3 }
- match: { suggest.result.0.options.1.text: "bar" }
- match: { suggest.result.0.options.1._index: "test" }
- match: { suggest.result.0.options.1._type: "test" }
- match: { suggest.result.0.options.1._source.title: "title_bar" }
- match: { suggest.result.0.options.1._source.count: 4 }
View File
@ -74,18 +74,19 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "foo" result:
completion: text: "foo"
field: suggest_context completion:
contexts: field: suggest_context
color: "red" contexts:
color: "red"
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "foo red" } - match: { suggest.result.0.options.0.text: "foo red" }
--- ---
"Category suggest context from path should work": "Category suggest context from path should work":
@ -114,45 +115,48 @@ setup:
indices.refresh: {} indices.refresh: {}
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "foo" result:
completion: text: "foo"
field: suggest_context_with_path completion:
contexts: field: suggest_context_with_path
color: "red" contexts:
color: "red"
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "Foo red" } - match: { suggest.result.0.options.0.text: "Foo red" }
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "foo" result:
completion: text: "foo"
field: suggest_context_with_path completion:
contexts: field: suggest_context_with_path
color: "blue" contexts:
color: "blue"
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "Foo blue" } - match: { suggest.result.0.options.0.text: "Foo blue" }
- do: - do:
suggest: search:
body: body:
result: suggest:
text: "foo" result:
completion: text: "foo"
field: suggest_context_with_path completion:
contexts: field: suggest_context_with_path
color: ["blue", "red"] contexts:
color: ["blue", "red"]
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 2 } - length: { suggest.result.0.options: 2 }
--- ---
"Geo suggest should work": "Geo suggest should work":
@ -190,21 +194,22 @@ setup:
indices.get_mapping: {} indices.get_mapping: {}
- do: - do:
suggest: search:
index: test index: test
body: body:
result: suggest:
text: "mar" result:
completion: text: "mar"
field: suggest_geo completion:
contexts: field: suggest_geo
location: contexts:
lat : 52.2263 location:
lon : 4.543 lat : 52.2263
lon : 4.543
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "Marriot in Amsterdam" } - match: { suggest.result.0.options.0.text: "Marriot in Amsterdam" }
--- ---
"Multi contexts should work": "Multi contexts should work":
@ -240,33 +245,35 @@ setup:
indices.get_mapping: {} indices.get_mapping: {}
- do: - do:
suggest: search:
index: test index: test
body: body:
result: suggest:
text: "mar" result:
completion: text: "mar"
field: suggest_multi_contexts completion:
contexts: field: suggest_multi_contexts
location: contexts:
lat : 52.22 location:
lon : 4.53 lat : 52.22
lon : 4.53
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "Marriot in Amsterdam" } - match: { suggest.result.0.options.0.text: "Marriot in Amsterdam" }
- do: - do:
suggest: search:
index: test index: test
body: body:
result: suggest:
text: "mar" result:
completion: text: "mar"
field: suggest_multi_contexts completion:
contexts: field: suggest_multi_contexts
color: "blue" contexts:
color: "blue"
- length: { result: 1 } - length: { suggest.result: 1 }
- length: { result.0.options: 1 } - length: { suggest.result.0.options: 1 }
- match: { result.0.options.0.text: "Marriot in Berlin" } - match: { suggest.result.0.options.0.text: "Marriot in Berlin" }
View File
@ -1,4 +1,5 @@
rootProject.name = 'elasticsearch' String dirName = rootProject.projectDir.name
rootProject.name = dirName
List projects = [ List projects = [
'build-tools', 'build-tools',
@ -88,7 +89,7 @@ if (isEclipse) {
/** /**
* Iterates over sub directories, looking for build.gradle, and adds a project if found * Iterates over sub directories, looking for build.gradle, and adds a project if found
* for that dir with the given path prefix. Note that this requires each level * for that dir with the given path prefix. Note that this requires each level
* of the dir hiearchy to have a build.gradle. Otherwise we would have to iterate * of the dir hierarchy to have a build.gradle. Otherwise we would have to iterate
* all files/directories in the source tree to find all projects. * all files/directories in the source tree to find all projects.
*/ */
void addSubProjects(String path, File dir) { void addSubProjects(String path, File dir) {
@ -98,17 +99,18 @@ void addSubProjects(String path, File dir) {
String projectName = "${path}:${dir.name}" String projectName = "${path}:${dir.name}"
include projectName include projectName
if (path.isEmpty()) {
project(projectName).projectDir = dir
}
for (File subdir : dir.listFiles()) { for (File subdir : dir.listFiles()) {
addSubProjects(projectName, subdir) addSubProjects(projectName, subdir)
} }
} }
// look for extra plugins for elasticsearch // look for extra plugins for elasticsearch
File xplugins = new File(rootProject.projectDir.parentFile, 'x-plugins') File extraProjects = new File(rootProject.projectDir.parentFile, "${dirName}-extra")
if (xplugins.exists()) { if (extraProjects.exists()) {
include ':x-plugins' for (File extraProjectDir : extraProjects.listFiles()) {
project(':x-plugins').projectDir = xplugins addSubProjects('', extraProjectDir)
for (File extraPluginDir : xplugins.listFiles()) {
addSubProjects(':x-plugins', extraPluginDir)
} }
} }
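For context, the layout the rewritten settings.gradle expects is a sibling directory named after the checkout plus an `-extra` suffix, with one subdirectory per extra project; each level needs its own build.gradle to be picked up. Project names below are hypothetical:

elasticsearch/            # main checkout; rootProject.projectDir.name == "elasticsearch"
elasticsearch-extra/      # sibling directory scanned by settings.gradle
    my-plugin/
        build.gradle      # presence of build.gradle makes this a project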
View File
@ -1065,7 +1065,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
} }
protected void ensureClusterSizeConsistency() { protected void ensureClusterSizeConsistency() {
if (cluster() != null) { // if static init fails the cluster can be null if (cluster() != null && cluster().size() > 0) { // if static init fails the cluster can be null
logger.trace("Check consistency for [{}] nodes", cluster().size()); logger.trace("Check consistency for [{}] nodes", cluster().size());
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(cluster().size())).get()); assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(cluster().size())).get());
} }
@ -1075,7 +1075,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
* Verifies that all nodes that have the same version of the cluster state as master have same cluster state * Verifies that all nodes that have the same version of the cluster state as master have same cluster state
*/ */
protected void ensureClusterStateConsistency() throws IOException { protected void ensureClusterStateConsistency() throws IOException {
if (cluster() != null) { if (cluster() != null && cluster().size() > 0) {
ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState(); ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState();
byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState); byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState);
// remove local node reference // remove local node reference
View File
@ -130,6 +130,7 @@ import java.util.stream.Stream;
import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY;
import static org.apache.lucene.util.LuceneTestCase.rarely; import static org.apache.lucene.util.LuceneTestCase.rarely;
import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING;
import static org.elasticsearch.test.ESTestCase.assertBusy; import static org.elasticsearch.test.ESTestCase.assertBusy;
import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomFrom;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@ -589,12 +590,14 @@ public final class InternalTestCluster extends TestCluster {
.put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), seed); .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), seed);
if (autoManageMinMasterNodes) { if (autoManageMinMasterNodes) {
assert finalSettings.get(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null : assert finalSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null :
"min master nodes may not be set when auto managed"; "min master nodes may not be set when auto managed";
finalSettings finalSettings
// don't wait too long not to slow down tests // don't wait too long not to slow down tests
.put(ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.getKey(), "5s") .put(ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.getKey(), "5s")
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), defaultMinMasterNodes); .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), defaultMinMasterNodes);
} else if (finalSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null) {
throw new IllegalArgumentException(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " must be configured");
} }
MockNode node = new MockNode(finalSettings.build(), plugins); MockNode node = new MockNode(finalSettings.build(), plugins);
return new NodeAndClient(name, node, nodeId); return new NodeAndClient(name, node, nodeId);
@ -883,8 +886,8 @@ public final class InternalTestCluster extends TestCluster {
newSettings.put(callbackSettings); newSettings.put(callbackSettings);
} }
if (minMasterNodes >= 0) { if (minMasterNodes >= 0) {
assert ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(newSettings.build()) == false : "min master nodes is auto managed"; assert DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(newSettings.build()) == false : "min master nodes is auto managed";
newSettings.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes).build(); newSettings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes).build();
} }
if (clearDataIfNeeded) { if (clearDataIfNeeded) {
clearDataIfNeeded(callback); clearDataIfNeeded(callback);
@ -908,6 +911,10 @@ public final class InternalTestCluster extends TestCluster {
private void createNewNode(final Settings newSettings) { private void createNewNode(final Settings newSettings) {
final long newIdSeed = NodeEnvironment.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id final long newIdSeed = NodeEnvironment.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id
Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build(); Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build();
if (DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(finalSettings) == false) {
throw new IllegalStateException(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() +
" is not configured after restart of [" + name + "]");
}
Collection<Class<? extends Plugin>> plugins = node.getClasspathPlugins(); Collection<Class<? extends Plugin>> plugins = node.getClasspathPlugins();
node = new MockNode(finalSettings, plugins); node = new MockNode(finalSettings, plugins);
markNodeDataDirsAsNotEligableForWipe(node); markNodeDataDirsAsNotEligableForWipe(node);
@ -1694,7 +1701,7 @@ public final class InternalTestCluster extends TestCluster {
logger.debug("updating min_master_nodes to [{}]", minMasterNodes); logger.debug("updating min_master_nodes to [{}]", minMasterNodes);
try { try {
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings( assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(
Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes) Settings.builder().put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes)
)); ));
} catch (Exception e) { } catch (Exception e) {
throw new ElasticsearchException("failed to update minimum master node to [{}] (current masters [{}])", e, throw new ElasticsearchException("failed to update minimum master node to [{}] (current masters [{}])", e,
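The `minMasterNodes` values asserted and updated throughout this class follow the standard majority-quorum rule for `discovery.zen.minimum_master_nodes`: more than half of the master-eligible nodes. A trivial worked check:

// Majority quorum: 1 -> 1, 2 -> 2, 3 -> 2, 4 -> 3, 5 -> 3.
public class Quorum {
    static int minMasterNodes(int masterEligibleNodes) {
        return masterEligibleNodes / 2 + 1;
    }

    public static void main(String[] args) {
        for (int n = 1; n <= 5; n++) {
            System.out.println(n + " master-eligible -> min_master_nodes = " + minMasterNodes(n));
        }
    }
}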
View File
@ -46,9 +46,11 @@ import org.junit.After;
import org.junit.Before; import org.junit.Before;
import java.io.IOException; import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.InetAddress; import java.net.InetAddress;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.net.ServerSocket; import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -1847,4 +1849,39 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
assertEquals("[][" + dummy.getAddress() +"] handshake_timeout[1ms]", ex.getMessage()); assertEquals("[][" + dummy.getAddress() +"] handshake_timeout[1ms]", ex.getMessage());
} }
} }
public void testTcpHandshakeConnectionReset() throws IOException, InterruptedException {
try (ServerSocket socket = new ServerSocket()) {
socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1);
socket.setReuseAddress(true);
DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(),
socket.getLocalPort()), emptyMap(),
emptySet(), version0);
Thread t = new Thread() {
@Override
public void run() {
try {
Socket accept = socket.accept();
accept.close();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
};
t.start();
ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
builder.addConnections(1,
TransportRequestOptions.Type.BULK,
TransportRequestOptions.Type.PING,
TransportRequestOptions.Type.RECOVERY,
TransportRequestOptions.Type.REG,
TransportRequestOptions.Type.STATE);
builder.setHandshakeTimeout(TimeValue.timeValueHours(1));
ConnectTransportException ex = expectThrows(ConnectTransportException.class,
() -> serviceA.connectToNode(dummy, builder.build()));
assertEquals("[][" + dummy.getAddress() +"] general node connection failure", ex.getMessage());
assertEquals("handshake failed", ex.getCause().getMessage());
t.join();
}
}
} }
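The server half of the new test is nothing more than accept-then-close, which makes the client's handshake read fail immediately. A self-contained reproduction of that trick with plain sockets (no Elasticsearch types):

import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;

// Accept a single connection and close it before any bytes are exchanged; the
// client then sees EOF (or a reset) where it expected a handshake response.
public class ResetOnAccept {
    public static void main(String[] args) throws IOException, InterruptedException {
        try (ServerSocket server = new ServerSocket()) {
            server.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 1);
            Thread acceptor = new Thread(() -> {
                try {
                    server.accept().close();
                } catch (IOException ignored) {
                    // server socket closed underneath us; nothing to do
                }
            });
            acceptor.start();
            try (Socket client = new Socket()) {
                client.connect(server.getLocalSocketAddress());
                System.out.println("handshake read returned: " + client.getInputStream().read());
            }
            acceptor.join();
        }
    }
}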
View File
@ -336,6 +336,7 @@ public class MockTcpTransport extends TcpTransport<MockTcpTransport.MockChannel>
if (isOpen.compareAndSet(true, false)) { if (isOpen.compareAndSet(true, false)) {
//establish a happens-before edge between closing and accepting a new connection //establish a happens-before edge between closing and accepting a new connection
synchronized (this) { synchronized (this) {
onChannelClosed(this);
IOUtils.close(serverSocket, activeChannel, () -> IOUtils.close(workerChannels.keySet()), IOUtils.close(serverSocket, activeChannel, () -> IOUtils.close(workerChannels.keySet()),
() -> cancellableThreads.cancel("channel closed"), onClose); () -> cancellableThreads.cancel("channel closed"), onClose);
} }
View File
@ -36,7 +36,6 @@ import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.discovery.TestZenDiscovery;
import org.elasticsearch.transport.MockTcpTransportPlugin; import org.elasticsearch.transport.MockTcpTransportPlugin;
import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.TransportSettings;
import org.hamcrest.Matcher;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
@ -61,7 +60,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFile
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
/** /**
@ -137,22 +135,15 @@ public class InternalTestClusterTests extends ESTestCase {
private void assertMMNinNodeSetting(String node, InternalTestCluster cluster, int masterNodes) { private void assertMMNinNodeSetting(String node, InternalTestCluster cluster, int masterNodes) {
final int minMasterNodes = masterNodes / 2 + 1; final int minMasterNodes = masterNodes / 2 + 1;
final Matcher<Map<? extends String, ? extends String>> minMasterMatcher =
hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes));
final Matcher<Map<? extends String, ?>> noMinMasterNodesMatcher = not(hasKey(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()));
Settings nodeSettings = cluster.client(node).admin().cluster().prepareNodesInfo(node).get().getNodes().get(0).getSettings(); Settings nodeSettings = cluster.client(node).admin().cluster().prepareNodesInfo(node).get().getNodes().get(0).getSettings();
assertThat("node setting of node [" + node + "] has the wrong min_master_node setting: [" assertThat("node setting of node [" + node + "] has the wrong min_master_node setting: ["
+ nodeSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]", + nodeSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]",
nodeSettings.getAsMap(), nodeSettings.getAsMap(),
cluster.getAutoManageMinMasterNode() ? minMasterMatcher: noMinMasterNodesMatcher); hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes)));
} }
private void assertMMNinClusterSetting(InternalTestCluster cluster, int masterNodes) { private void assertMMNinClusterSetting(InternalTestCluster cluster, int masterNodes) {
final int minMasterNodes = masterNodes / 2 + 1; final int minMasterNodes = masterNodes / 2 + 1;
Matcher<Map<? extends String, ? extends String>> minMasterMatcher =
hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes));
Matcher<Map<? extends String, ?>> noMinMasterNodesMatcher = not(hasKey(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()));
for (final String node : cluster.getNodeNames()) { for (final String node : cluster.getNodeNames()) {
Settings stateSettings = cluster.client(node).admin().cluster().prepareState().setLocal(true) Settings stateSettings = cluster.client(node).admin().cluster().prepareState().setLocal(true)
.get().getState().getMetaData().settings(); .get().getState().getMetaData().settings();
@ -160,27 +151,44 @@ public class InternalTestClusterTests extends ESTestCase {
assertThat("dynamic setting for node [" + node + "] has the wrong min_master_node setting : [" assertThat("dynamic setting for node [" + node + "] has the wrong min_master_node setting : ["
+ stateSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]", + stateSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]",
stateSettings.getAsMap(), stateSettings.getAsMap(),
cluster.getAutoManageMinMasterNode() ? minMasterMatcher: noMinMasterNodesMatcher); hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes)));
} }
} }
public void testBeforeTest() throws Exception { public void testBeforeTest() throws Exception {
final boolean autoManageMinMasterNodes = randomBoolean();
long clusterSeed = randomLong(); long clusterSeed = randomLong();
boolean masterNodes = randomBoolean(); final boolean masterNodes;
int minNumDataNodes = randomIntBetween(0, 3); final int minNumDataNodes;
int maxNumDataNodes = randomIntBetween(minNumDataNodes, 4); final int maxNumDataNodes;
int numClientNodes = randomIntBetween(0, 2); if (autoManageMinMasterNodes) {
masterNodes = randomBoolean();
minNumDataNodes = randomIntBetween(0, 3);
maxNumDataNodes = randomIntBetween(minNumDataNodes, 4);
} else {
// if we manage min master nodes, we need to lock down the number of nodes
minNumDataNodes = randomIntBetween(0, 4);
maxNumDataNodes = minNumDataNodes;
masterNodes = false;
}
final int numClientNodes = randomIntBetween(0, 2);
final String clusterName1 = "shared1"; final String clusterName1 = "shared1";
final String clusterName2 = "shared2"; final String clusterName2 = "shared2";
NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() {
@Override @Override
public Settings nodeSettings(int nodeOrdinal) { public Settings nodeSettings(int nodeOrdinal) {
return Settings.builder() final Settings.Builder settings = Settings.builder()
.put( .put(
NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(),
2 * ((masterNodes ? InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES : 0) + maxNumDataNodes + numClientNodes)) 2 * ((masterNodes ? InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES : 0) + maxNumDataNodes + numClientNodes))
.put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(NetworkModule.HTTP_ENABLED.getKey(), false)
.put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME).build(); .put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME);
if (autoManageMinMasterNodes == false) {
assert minNumDataNodes == maxNumDataNodes;
assert masterNodes == false;
settings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minNumDataNodes / 2 + 1);
}
return settings.build();
} }
@Override @Override
@ -195,7 +203,6 @@ public class InternalTestClusterTests extends ESTestCase {
Path baseDir = createTempDir(); Path baseDir = createTempDir();
final List<Class<? extends Plugin>> mockPlugins = Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class); final List<Class<? extends Plugin>> mockPlugins = Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class);
final boolean autoManageMinMasterNodes = randomBoolean();
InternalTestCluster cluster0 = new InternalTestCluster(clusterSeed, baseDir, masterNodes, InternalTestCluster cluster0 = new InternalTestCluster(clusterSeed, baseDir, masterNodes,
autoManageMinMasterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, autoManageMinMasterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes,
enableHttpPipelining, nodePrefix, mockPlugins, Function.identity()); enableHttpPipelining, nodePrefix, mockPlugins, Function.identity());
@ -258,9 +265,8 @@ public class InternalTestClusterTests extends ESTestCase {
boolean enableHttpPipelining = randomBoolean(); boolean enableHttpPipelining = randomBoolean();
String nodePrefix = "test"; String nodePrefix = "test";
Path baseDir = createTempDir(); Path baseDir = createTempDir();
final boolean autoManageMinMasterNodes = randomBoolean();
InternalTestCluster cluster = new InternalTestCluster(clusterSeed, baseDir, masterNodes, InternalTestCluster cluster = new InternalTestCluster(clusterSeed, baseDir, masterNodes,
autoManageMinMasterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, true, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes,
enableHttpPipelining, nodePrefix, Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class), enableHttpPipelining, nodePrefix, Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class),
Function.identity()); Function.identity());
try { try {