Merge branch 'master' into feature/rank-eval
commit 5618d6ca49

@@ -474,10 +474,10 @@ gradle run --debug-jvm
 == Building with extra plugins
 Additional plugins may be built alongside elasticsearch, where their
 dependency on elasticsearch will be substituted with the local elasticsearch
-build. To add your plugin, create a directory called x-plugins as a sibling
-of elasticsearch. Checkout your plugin underneath x-plugins and the build
-will automatically pick it up. You can verify the plugin is included as part
-of the build by checking the projects of the build.
+build. To add your plugin, create a directory called elasticsearch-extra as
+a sibling of elasticsearch. Checkout your plugin underneath elasticsearch-extra
+and the build will automatically pick it up. You can verify the plugin is
+included as part of the build by checking the projects of the build.
 
 ---------------------------------------------------------------------------
 gradle projects

@@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.rest.RestStatus;
@@ -43,6 +44,8 @@ import java.util.stream.Collectors;
 
 import static java.util.Collections.unmodifiableMap;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_UUID_NA_VALUE;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
 
 /**
  * A base class for all elasticsearch exceptions.
@@ -71,6 +74,14 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     private static final String RESOURCE_HEADER_TYPE_KEY = "es.resource.type";
     private static final String RESOURCE_HEADER_ID_KEY = "es.resource.id";
 
+    private static final String TYPE = "type";
+    private static final String REASON = "reason";
+    private static final String CAUSED_BY = "caused_by";
+    private static final String STACK_TRACE = "stack_trace";
+    private static final String HEADER = "header";
+    private static final String ERROR = "error";
+    private static final String ROOT_CAUSE = "root_cause";
+
     private static final Map<Integer, FunctionThatThrowsIOException<StreamInput, ? extends ElasticsearchException>> ID_TO_SUPPLIER;
     private static final Map<Class<? extends ElasticsearchException>, ElasticsearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE;
     private final Map<String, List<String>> headers = new HashMap<>();
@@ -247,8 +258,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
         if (ex != this) {
             toXContent(builder, params, this);
         } else {
-            builder.field("type", getExceptionName());
-            builder.field("reason", getMessage());
+            builder.field(TYPE, getExceptionName());
+            builder.field(REASON, getMessage());
             for (String key : headers.keySet()) {
                 if (key.startsWith("es.")) {
                     List<String> values = headers.get(key);
@@ -258,7 +269,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
             innerToXContent(builder, params);
             renderHeader(builder, params);
             if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) {
-                builder.field("stack_trace", ExceptionsHelper.stackTrace(this));
+                builder.field(STACK_TRACE, ExceptionsHelper.stackTrace(this));
             }
         }
         return builder;
@@ -277,7 +288,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     protected void causeToXContent(XContentBuilder builder, Params params) throws IOException {
         final Throwable cause = getCause();
         if (cause != null && params.paramAsBoolean(REST_EXCEPTION_SKIP_CAUSE, REST_EXCEPTION_SKIP_CAUSE_DEFAULT) == false) {
-            builder.field("caused_by");
+            builder.field(CAUSED_BY);
             builder.startObject();
             toXContent(builder, params, cause);
             builder.endObject();
@@ -291,7 +302,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
                 continue;
             }
             if (hasHeader == false) {
-                builder.startObject("header");
+                builder.startObject(HEADER);
                 hasHeader = true;
             }
             List<String> values = headers.get(key);
@@ -324,20 +335,74 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
         if (ex instanceof ElasticsearchException) {
             ((ElasticsearchException) ex).toXContent(builder, params);
         } else {
-            builder.field("type", getExceptionName(ex));
-            builder.field("reason", ex.getMessage());
+            builder.field(TYPE, getExceptionName(ex));
+            builder.field(REASON, ex.getMessage());
             if (ex.getCause() != null) {
-                builder.field("caused_by");
+                builder.field(CAUSED_BY);
                 builder.startObject();
                 toXContent(builder, params, ex.getCause());
                 builder.endObject();
             }
             if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) {
-                builder.field("stack_trace", ExceptionsHelper.stackTrace(ex));
+                builder.field(STACK_TRACE, ExceptionsHelper.stackTrace(ex));
             }
         }
     }
 
+    /**
+     * Generate a {@link ElasticsearchException} from a {@link XContentParser}. This does not
+     * return the original exception type (ie NodeClosedException for example) but just wraps
+     * the type, the reason and the cause of the exception. It also recursively parses the
+     * tree structure of the cause, returning it as a tree structure of {@link ElasticsearchException}
+     * instances.
+     */
+    public static ElasticsearchException fromXContent(XContentParser parser) throws IOException {
+        XContentParser.Token token = ensureFieldName(parser.nextToken(), parser::getTokenLocation);
+
+        String type = null, reason = null, stack = null;
+        ElasticsearchException cause = null;
+        Map<String, Object> headers = new HashMap<>();
+
+        do {
+            String currentFieldName = parser.currentName();
+            token = parser.nextToken();
+            if (token.isValue()) {
+                if (TYPE.equals(currentFieldName)) {
+                    type = parser.text();
+                } else if (REASON.equals(currentFieldName)) {
+                    reason = parser.text();
+                } else if (STACK_TRACE.equals(currentFieldName)) {
+                    stack = parser.text();
+                } else {
+                    // Everything else is considered as a header
+                    headers.put(currentFieldName, parser.text());
+                }
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                if (CAUSED_BY.equals(currentFieldName)) {
+                    cause = fromXContent(parser);
+                } else if (HEADER.equals(currentFieldName)) {
+                    headers.putAll(parser.map());
+                } else {
+                    throwUnknownField(currentFieldName, parser.getTokenLocation());
+                }
+            }
+        } while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME);
+
+        StringBuilder message = new StringBuilder("Elasticsearch exception [");
+        message.append(TYPE).append('=').append(type).append(", ");
+        message.append(REASON).append('=').append(reason);
+        if (stack != null) {
+            message.append(", ").append(STACK_TRACE).append('=').append(stack);
+        }
+        message.append(']');
+
+        ElasticsearchException e = new ElasticsearchException(message.toString(), cause);
+        for (Map.Entry<String, Object> header : headers.entrySet()) {
+            e.addHeader(header.getKey(), String.valueOf(header.getValue()));
+        }
+        return e;
+    }
+
     /**
      * Returns the root cause of this exception or multiple if different shards caused different exceptions
      */
@@ -809,9 +874,9 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     }
 
     public static void renderException(XContentBuilder builder, Params params, Exception e) throws IOException {
-        builder.startObject("error");
+        builder.startObject(ERROR);
         final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(e);
-        builder.field("root_cause");
+        builder.field(ROOT_CAUSE);
         builder.startArray();
         for (ElasticsearchException rootCause : rootCauses) {
             builder.startObject();

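Sketch of the round trip the new fromXContent enables (not code from this commit; it assumes the JSON content type, jsonBuilder(), and the builder.bytes()/createParser(BytesReference) calls that appear elsewhere in this diff):

    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    // renders the TYPE/REASON fields plus a nested CAUSED_BY object for the cause
    new ElasticsearchException("outer failure", new IllegalArgumentException("inner"))
            .toXContent(builder, ToXContent.EMPTY_PARAMS);
    builder.endObject();

    try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes())) {
        parser.nextToken(); // position on START_OBJECT; fromXContent then expects a field name
        ElasticsearchException parsed = ElasticsearchException.fromXContent(parser);
        // parsed is a generic wrapper: the cause chain survives, but the concrete
        // exception types do not, as the javadoc above notes
    }
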
@@ -22,22 +22,20 @@ package org.elasticsearch.action.admin.cluster.repositories.put;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 
 import java.io.IOException;
 import java.util.Map;
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 
 /**
  * Register repository request.
@@ -198,18 +196,8 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
      *
      * @param repositoryDefinition repository definition
      */
-    public PutRepositoryRequest source(XContentBuilder repositoryDefinition) {
-        return source(repositoryDefinition.bytes());
-    }
-
-    /**
-     * Parses repository definition.
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(Map repositoryDefinition) {
-        Map<String, Object> source = repositoryDefinition;
-        for (Map.Entry<String, Object> entry : source.entrySet()) {
+    public PutRepositoryRequest source(Map<String, Object> repositoryDefinition) {
+        for (Map.Entry<String, Object> entry : repositoryDefinition.entrySet()) {
             String name = entry.getKey();
             if (name.equals("type")) {
                 type(entry.getValue().toString());
@@ -217,64 +205,14 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
                 if (!(entry.getValue() instanceof Map)) {
                     throw new IllegalArgumentException("Malformed settings section, should include an inner object");
                 }
-                settings((Map<String, Object>) entry.getValue());
+                @SuppressWarnings("unchecked")
+                Map<String, Object> sub = (Map<String, Object>) entry.getValue();
+                settings(sub);
             }
         }
        return this;
     }
 
-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(String repositoryDefinition) {
-        try (XContentParser parser = XContentFactory.xContent(repositoryDefinition).createParser(repositoryDefinition)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse repository source [" + repositoryDefinition + "]", e);
-        }
-    }
-
-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(byte[] repositoryDefinition) {
-        return source(repositoryDefinition, 0, repositoryDefinition.length);
-    }
-
-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(byte[] repositoryDefinition, int offset, int length) {
-        try (XContentParser parser = XContentFactory.xContent(repositoryDefinition, offset, length).createParser(repositoryDefinition, offset, length)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse repository source", e);
-        }
-    }
-
-    /**
-     * Parses repository definition.
-     * JSON, Smile and YAML formats are supported
-     *
-     * @param repositoryDefinition repository definition
-     */
-    public PutRepositoryRequest source(BytesReference repositoryDefinition) {
-        try (XContentParser parser = XContentFactory.xContent(repositoryDefinition).createParser(repositoryDefinition)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse template source", e);
-        }
-    }
-
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);

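With the String/byte[]/BytesReference/XContentBuilder overloads gone, callers parse first and hand source(Map) the result; the RestPutRepositoryAction change further down does exactly that via parser.mapOrdered(). A sketch of building the map directly (the repository name, constructor, and settings values here are illustrative):

    Map<String, Object> definition = new HashMap<>();
    definition.put("type", "fs"); // consumed by the name.equals("type") branch above
    definition.put("settings", Collections.singletonMap("location", "/mount/backups")); // must be a Map, or source() throws
    PutRepositoryRequest request = new PutRepositoryRequest("my_backup").source(definition);
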
@@ -25,13 +25,11 @@ import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 
 import java.io.IOException;
@@ -41,10 +39,9 @@ import java.util.Map;
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.common.Strings.EMPTY_ARRAY;
-import static org.elasticsearch.common.Strings.hasLength;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
 
 /**
@@ -357,17 +354,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
      * @param source snapshot definition
      * @return this request
      */
-    public CreateSnapshotRequest source(XContentBuilder source) {
-        return source(source.bytes());
-    }
-
-    /**
-     * Parses snapshot definition.
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(Map source) {
+    public CreateSnapshotRequest source(Map<String, Object> source) {
         for (Map.Entry<String, Object> entry : ((Map<String, Object>) source).entrySet()) {
             String name = entry.getKey();
             if (name.equals("indices")) {
@@ -393,66 +380,6 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
         return this;
     }
 
-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(String source) {
-        if (hasLength(source)) {
-            try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-                return source(parser.mapOrdered());
-            } catch (Exception e) {
-                throw new IllegalArgumentException("failed to parse repository source [" + source + "]", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(byte[] source) {
-        return source(source, 0, source.length);
-    }
-
-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @param offset offset
-     * @param length length
-     * @return this request
-     */
-    public CreateSnapshotRequest source(byte[] source, int offset, int length) {
-        if (length > 0) {
-            try (XContentParser parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length)) {
-                return source(parser.mapOrdered());
-            } catch (IOException e) {
-                throw new IllegalArgumentException("failed to parse repository source", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses snapshot definition. JSON, YAML and properties formats are supported
-     *
-     * @param source snapshot definition
-     * @return this request
-     */
-    public CreateSnapshotRequest source(BytesReference source) {
-        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse snapshot source", e);
-        }
-    }
-
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);

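CreateSnapshotRequest gets the same treatment. A sketch with illustrative values; "indices", "partial", and "include_global_state" are among the keys its source(Map) loop and the lenientNodeBooleanValue calls handle:

    Map<String, Object> snapshotDef = new HashMap<>();
    snapshotDef.put("indices", "index_1,index_2");
    snapshotDef.put("partial", false);
    snapshotDef.put("include_global_state", true);
    new CreateSnapshotRequest("my_backup", "snapshot_1").source(snapshotDef);
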
@@ -24,13 +24,11 @@ import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.MasterNodeRequest;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 
 import java.io.IOException;
@@ -39,10 +37,9 @@ import java.util.List;
 import java.util.Map;
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
-import static org.elasticsearch.common.Strings.hasLength;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
 
 /**
@@ -472,22 +469,8 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
      * @param source restore definition
      * @return this request
     */
-    public RestoreSnapshotRequest source(XContentBuilder source) {
-        try {
-            return source(source.bytes());
-        } catch (Exception e) {
-            throw new IllegalArgumentException("Failed to build json for repository request", e);
-        }
-    }
-
-    /**
-     * Parses restore definition
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(Map source) {
-        for (Map.Entry<String, Object> entry : ((Map<String, Object>) source).entrySet()) {
+    public RestoreSnapshotRequest source(Map<String, Object> source) {
+        for (Map.Entry<String, Object> entry : source.entrySet()) {
             String name = entry.getKey();
             if (name.equals("indices")) {
                 if (entry.getValue() instanceof String) {
@@ -543,74 +526,6 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
         return this;
     }
 
-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(String source) {
-        if (hasLength(source)) {
-            try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-                return source(parser.mapOrdered());
-            } catch (Exception e) {
-                throw new IllegalArgumentException("failed to parse repository source [" + source + "]", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(byte[] source) {
-        return source(source, 0, source.length);
-    }
-
-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @param offset offset
-     * @param length length
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(byte[] source, int offset, int length) {
-        if (length > 0) {
-            try (XContentParser parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length)) {
-                return source(parser.mapOrdered());
-            } catch (IOException e) {
-                throw new IllegalArgumentException("failed to parse repository source", e);
-            }
-        }
-        return this;
-    }
-
-    /**
-     * Parses restore definition
-     * <p>
-     * JSON, YAML and properties formats are supported
-     *
-     * @param source restore definition
-     * @return this request
-     */
-    public RestoreSnapshotRequest source(BytesReference source) {
-        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-            return source(parser.mapOrdered());
-        } catch (IOException e) {
-            throw new IllegalArgumentException("failed to parse template source", e);
-        }
-    }
-
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.action.admin.indices.rollover;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -26,16 +25,11 @@ import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParseFieldMatcherSupplier;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
 
 import java.io.IOException;
 import java.util.HashSet;
@@ -50,7 +44,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  */
 public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implements IndicesRequest {
 
-    public static ObjectParser<RolloverRequest, ParseFieldMatcherSupplier> PARSER =
+    public static final ObjectParser<RolloverRequest, ParseFieldMatcherSupplier> PARSER =
         new ObjectParser<>("conditions", null);
     static {
         PARSER.declareField((parser, request, parseFieldMatcherSupplier) ->
@@ -194,19 +188,6 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
         return createIndexRequest;
     }
 
-    public void source(BytesReference source) {
-        XContentType xContentType = XContentFactory.xContentType(source);
-        if (xContentType != null) {
-            try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) {
-                PARSER.parse(parser, this, () -> ParseFieldMatcher.EMPTY);
-            } catch (IOException e) {
-                throw new ElasticsearchParseException("failed to parse source for rollover index", e);
-            }
-        } else {
-            throw new ElasticsearchParseException("failed to parse content type for rollover index source");
-        }
-    }
-
     /**
      * Sets the number of shard copies that should be active for creation of the
     * new rollover index to return. Defaults to {@link ActiveShardCount#DEFAULT}, which will

@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.action.admin.indices.shrink;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -26,15 +25,10 @@ import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParseFieldMatcherSupplier;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
 
 import java.io.IOException;
 import java.util.Objects;
@@ -46,7 +40,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  */
 public class ShrinkRequest extends AcknowledgedRequest<ShrinkRequest> implements IndicesRequest {
 
-    public static ObjectParser<ShrinkRequest, ParseFieldMatcherSupplier> PARSER =
+    public static final ObjectParser<ShrinkRequest, ParseFieldMatcherSupplier> PARSER =
         new ObjectParser<>("shrink_request", null);
     static {
         PARSER.declareField((parser, request, parseFieldMatcherSupplier) ->
@@ -152,17 +146,4 @@ public class ShrinkRequest extends AcknowledgedRequest<ShrinkRequest> implements
     public void setWaitForActiveShards(final int waitForActiveShards) {
         setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards));
     }
-
-    public void source(BytesReference source) {
-        XContentType xContentType = XContentFactory.xContentType(source);
-        if (xContentType != null) {
-            try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) {
-                PARSER.parse(parser, this, () -> ParseFieldMatcher.EMPTY);
-            } catch (IOException e) {
-                throw new ElasticsearchParseException("failed to parse source for shrink index", e);
-            }
-        } else {
-            throw new ElasticsearchParseException("failed to parse content type for shrink index source");
-        }
-    }
 }

@@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.CompositeIndicesRequest;
 import org.elasticsearch.action.DocWriteRequest;
-import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
@@ -400,8 +399,10 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
                         UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict)
                                 .version(version).versionType(versionType)
                                 .routing(routing)
-                                .parent(parent)
-                                .fromXContent(data.slice(from, nextMarker - from));
+                                .parent(parent);
+                        try (XContentParser sliceParser = xContent.createParser(data.slice(from, nextMarker - from))) {
+                            updateRequest.fromXContent(sliceParser);
+                        }
                         if (fetchSourceContext != null) {
                             updateRequest.fetchSource(fetchSourceContext);
                         }

@@ -34,6 +34,7 @@ import org.elasticsearch.rest.RestStatus;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Objects;
 
 /**
  * Base class for write action responses.
@@ -120,6 +121,25 @@ public class ReplicationResponse extends ActionResponse {
             return status;
         }
 
+        @Override
+        public boolean equals(Object that) {
+            if (this == that) {
+                return true;
+            }
+            if (that == null || getClass() != that.getClass()) {
+                return false;
+            }
+            ShardInfo other = (ShardInfo) that;
+            return Objects.equals(total, other.total) &&
+                    Objects.equals(successful, other.successful) &&
+                    Arrays.equals(failures, other.failures);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(total, successful, failures);
+        }
+
         @Override
         public void readFrom(StreamInput in) throws IOException {
             total = in.readVInt();
@@ -251,6 +271,27 @@ public class ReplicationResponse extends ActionResponse {
                 return primary;
             }
 
+            @Override
+            public boolean equals(Object that) {
+                if (this == that) {
+                    return true;
+                }
+                if (that == null || getClass() != that.getClass()) {
+                    return false;
+                }
+                Failure failure = (Failure) that;
+                return Objects.equals(primary, failure.primary) &&
+                        Objects.equals(shardId, failure.shardId) &&
+                        Objects.equals(nodeId, failure.nodeId) &&
+                        Objects.equals(cause, failure.cause) &&
+                        Objects.equals(status, failure.status);
+            }
+
+            @Override
+            public int hashCode() {
+                return Objects.hash(shardId, nodeId, cause, status, primary);
+            }
+
             @Override
             public void readFrom(StreamInput in) throws IOException {
                 shardId = ShardId.readShardId(in);

@@ -28,8 +28,6 @@ import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.DeprecationLogger;
@@ -689,18 +687,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return upsertRequest;
     }
 
-    public UpdateRequest fromXContent(XContentBuilder source) throws Exception {
-        return fromXContent(source.bytes());
-    }
-
-    public UpdateRequest fromXContent(byte[] source) throws Exception {
-        return fromXContent(source, 0, source.length);
-    }
-
-    public UpdateRequest fromXContent(byte[] source, int offset, int length) throws Exception {
-        return fromXContent(new BytesArray(source, offset, length));
-    }
-
     /**
      * Should this update attempt to detect if it is a noop? Defaults to true.
      * @return this for chaining
@@ -717,52 +703,48 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
         return detectNoop;
     }
 
-    public UpdateRequest fromXContent(BytesReference source) throws IOException {
+    public UpdateRequest fromXContent(XContentParser parser) throws IOException {
         Script script = null;
-        try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
-            XContentParser.Token token = parser.nextToken();
-            if (token == null) {
-                return this;
-            }
-            String currentFieldName = null;
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                if (token == XContentParser.Token.FIELD_NAME) {
-                    currentFieldName = parser.currentName();
-                } else if ("script".equals(currentFieldName)) {
-                    script = Script.parse(parser, ParseFieldMatcher.EMPTY);
-                } else if ("scripted_upsert".equals(currentFieldName)) {
-                    scriptedUpsert = parser.booleanValue();
-                } else if ("upsert".equals(currentFieldName)) {
-                    XContentType xContentType = XContentFactory.xContentType(source);
-                    XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
-                    builder.copyCurrentStructure(parser);
-                    safeUpsertRequest().source(builder);
-                } else if ("doc".equals(currentFieldName)) {
-                    XContentType xContentType = XContentFactory.xContentType(source);
-                    XContentBuilder docBuilder = XContentFactory.contentBuilder(xContentType);
-                    docBuilder.copyCurrentStructure(parser);
-                    safeDoc().source(docBuilder);
-                } else if ("doc_as_upsert".equals(currentFieldName)) {
-                    docAsUpsert(parser.booleanValue());
-                } else if ("detect_noop".equals(currentFieldName)) {
-                    detectNoop(parser.booleanValue());
-                } else if ("fields".equals(currentFieldName)) {
-                    List<Object> fields = null;
-                    if (token == XContentParser.Token.START_ARRAY) {
-                        fields = (List) parser.list();
-                    } else if (token.isValue()) {
-                        fields = Collections.singletonList(parser.text());
-                    }
-                    if (fields != null) {
-                        fields(fields.toArray(new String[fields.size()]));
-                    }
-                } else if ("_source".equals(currentFieldName)) {
-                    fetchSourceContext = FetchSourceContext.parse(parser);
+        XContentParser.Token token = parser.nextToken();
+        if (token == null) {
+            return this;
+        }
+        String currentFieldName = null;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            if (token == XContentParser.Token.FIELD_NAME) {
+                currentFieldName = parser.currentName();
+            } else if ("script".equals(currentFieldName)) {
+                script = Script.parse(parser, ParseFieldMatcher.EMPTY);
+            } else if ("scripted_upsert".equals(currentFieldName)) {
+                scriptedUpsert = parser.booleanValue();
+            } else if ("upsert".equals(currentFieldName)) {
+                XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
+                builder.copyCurrentStructure(parser);
+                safeUpsertRequest().source(builder);
+            } else if ("doc".equals(currentFieldName)) {
+                XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
+                docBuilder.copyCurrentStructure(parser);
+                safeDoc().source(docBuilder);
+            } else if ("doc_as_upsert".equals(currentFieldName)) {
+                docAsUpsert(parser.booleanValue());
+            } else if ("detect_noop".equals(currentFieldName)) {
+                detectNoop(parser.booleanValue());
+            } else if ("fields".equals(currentFieldName)) {
+                List<Object> fields = null;
+                if (token == XContentParser.Token.START_ARRAY) {
+                    fields = (List) parser.list();
+                } else if (token.isValue()) {
+                    fields = Collections.singletonList(parser.text());
                 }
+                if (fields != null) {
+                    fields(fields.toArray(new String[fields.size()]));
+                }
+            } else if ("_source".equals(currentFieldName)) {
+                fetchSourceContext = FetchSourceContext.parse(parser);
             }
-            if (script != null) {
-                this.script = script;
-            }
         }
+        if (script != null) {
+            this.script = script;
+        }
         return this;
     }

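The BulkRequest hunk above shows the intended call pattern: the caller now owns parser creation and closing. A standalone sketch with an illustrative body, using the content-type-guessing xContent(String) overload that appears in the removed code:

    String body = "{\"doc\":{\"title\":\"updated\"},\"detect_noop\":true}";
    try (XContentParser parser = XContentFactory.xContent(body).createParser(body)) {
        new UpdateRequest("index", "type", "1").fromXContent(parser);
    }
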
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.xcontent;
+
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+
+import java.io.IOException;
+import java.util.Locale;
+import java.util.function.Supplier;
+
+/**
+ * A set of static methods to get {@link Token} from {@link XContentParser}
+ * while checking for their types and throw {@link ParsingException} if needed.
+ */
+public final class XContentParserUtils {
+
+    private XContentParserUtils() {
+    }
+
+    /**
+     * Makes sure that current token is of type {@link XContentParser.Token#FIELD_NAME}
+     *
+     * @return the token
+     * @throws ParsingException if the token is not of type {@link XContentParser.Token#FIELD_NAME}
+     */
+    public static Token ensureFieldName(Token token, Supplier<XContentLocation> location) throws IOException {
+        return ensureType(Token.FIELD_NAME, token, location);
+    }
+
+    /**
+     * Makes sure that current token is of type {@link XContentParser.Token#FIELD_NAME} and the the field name is equal to the provided one
+     *
+     * @return the token
+     * @throws ParsingException if the token is not of type {@link XContentParser.Token#FIELD_NAME} or is not equal to the given
+     * field name
+     */
+    public static Token ensureFieldName(XContentParser parser, Token token, String fieldName) throws IOException {
+        Token t = ensureType(Token.FIELD_NAME, token, parser::getTokenLocation);
+
+        String current = parser.currentName() != null ? parser.currentName() : "<null>";
+        if (current.equals(fieldName) == false) {
+            String message = "Failed to parse object: expecting field with name [%s] but found [%s]";
+            throw new ParsingException(parser.getTokenLocation(), String.format(Locale.ROOT, message, fieldName, current));
+        }
+        return t;
+    }
+
+    /**
+     * @throws ParsingException with a "unknown field found" reason
+     */
+    public static void throwUnknownField(String field, XContentLocation location) {
+        String message = "Failed to parse object: unknown field [%s] found";
+        throw new ParsingException(location, String.format(Locale.ROOT, message, field));
+    }
+
+    /**
+     * Makes sure that current token is of the expected type
+     *
+     * @return the token
+     * @throws ParsingException if the token is not equal to the expected type
+     */
+    private static Token ensureType(Token expected, Token current, Supplier<XContentLocation> location) {
+        if (current != expected) {
+            String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
+            throw new ParsingException(location.get(), String.format(Locale.ROOT, message, expected, current));
+        }
+        return current;
+    }
+}

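Usage sketch for the new helpers, mirroring how ElasticsearchException.fromXContent drives them (the JSON literal is illustrative):

    try (XContentParser parser = XContentFactory.xContent(XContentType.JSON)
            .createParser("{\"type\":\"exception\",\"reason\":\"test\"}")) {
        parser.nextToken();                                  // START_OBJECT
        ensureFieldName(parser, parser.nextToken(), "type"); // ParsingException on any other field
        parser.nextToken();
        String type = parser.text();
    }
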
@@ -468,18 +468,26 @@ public class NodeJoinController extends AbstractComponent {
 
     private ClusterState.Builder becomeMasterAndTrimConflictingNodes(ClusterState currentState, List<DiscoveryNode> joiningNodes) {
         assert currentState.nodes().getMasterNodeId() == null : currentState;
-        DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentState.nodes());
+        DiscoveryNodes currentNodes = currentState.nodes();
+        DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentNodes);
         nodesBuilder.masterNodeId(currentState.nodes().getLocalNodeId());
         ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks())
             .removeGlobalBlock(discoverySettings.getNoMasterBlock()).build();
         for (final DiscoveryNode joiningNode : joiningNodes) {
-            final DiscoveryNode existingNode = nodesBuilder.get(joiningNode.getId());
-            if (existingNode != null && existingNode.equals(joiningNode) == false) {
-                logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", existingNode, joiningNode);
-                nodesBuilder.remove(existingNode.getId());
+            final DiscoveryNode nodeWithSameId = nodesBuilder.get(joiningNode.getId());
+            if (nodeWithSameId != null && nodeWithSameId.equals(joiningNode) == false) {
+                logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", nodeWithSameId, joiningNode);
+                nodesBuilder.remove(nodeWithSameId.getId());
             }
+            final DiscoveryNode nodeWithSameAddress = currentNodes.findByAddress(joiningNode.getAddress());
+            if (nodeWithSameAddress != null && nodeWithSameAddress.equals(joiningNode) == false) {
+                logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", nodeWithSameAddress,
+                    joiningNode);
+                nodesBuilder.remove(nodeWithSameAddress.getId());
+            }
         }
 
+
         // now trim any left over dead nodes - either left there when the previous master stepped down
         // or removed by us above
         ClusterState tmpState = ClusterState.builder(currentState).nodes(nodesBuilder).blocks(clusterBlocks).build();

@@ -327,6 +327,11 @@ public class DocumentMapper implements ToXContent {
      */
     public DocumentMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
         Mapping updated = this.mapping.updateFieldType(fullNameToFieldType);
+        if (updated == this.mapping) {
+            // no change
+            return this;
+        }
+        assert updated == updated.updateFieldType(fullNameToFieldType) : "updateFieldType operation is not idempotent";
         return new DocumentMapper(mapperService, updated);
     }
 

@@ -93,7 +93,7 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
             // is the update even legal?
             checkCompatibility(type, fieldMapper, updateAllTypes);
 
-            if (fieldType != fullNameFieldType) {
+            if (fieldType.equals(fullNameFieldType) == false) {
                 fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType());
             }
 

@@ -104,12 +104,22 @@ public final class Mapping implements ToXContent {
      * Recursively update sub field types.
      */
     public Mapping updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
-        final MetadataFieldMapper[] updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
-        for (int i = 0; i < updatedMeta.length; ++i) {
-            updatedMeta[i] = (MetadataFieldMapper) updatedMeta[i].updateFieldType(fullNameToFieldType);
+        MetadataFieldMapper[] updatedMeta = null;
+        for (int i = 0; i < metadataMappers.length; ++i) {
+            MetadataFieldMapper currentFieldMapper = metadataMappers[i];
+            MetadataFieldMapper updatedFieldMapper = (MetadataFieldMapper) currentFieldMapper.updateFieldType(fullNameToFieldType);
+            if (updatedFieldMapper != currentFieldMapper) {
+                if (updatedMeta == null) {
+                    updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
+                }
+                updatedMeta[i] = updatedFieldMapper;
+            }
         }
         RootObjectMapper updatedRoot = root.updateFieldType(fullNameToFieldType);
-        return new Mapping(indexCreated, updatedRoot, updatedMeta, meta);
+        if (updatedMeta == null && updatedRoot == root) {
+            return this;
+        }
+        return new Mapping(indexCreated, updatedRoot, updatedMeta == null ? metadataMappers : updatedMeta, meta);
     }
 
     @Override

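The rewritten Mapping#updateFieldType only allocates when a mapper actually changed and returns this when nothing did, which is what makes the new updated == this.mapping short-circuit and the idempotency assert in DocumentMapper work. The idiom in isolation (a generic sketch, not code from this commit):

    // Lazily copy the array: callers get the original instance back when no
    // element was replaced, so an identity check can detect "no change".
    static <T> T[] updateAll(T[] input, UnaryOperator<T> update) {
        T[] updated = null;
        for (int i = 0; i < input.length; i++) {
            T changed = update.apply(input[i]);
            if (changed != input[i]) {
                if (updated == null) {
                    updated = Arrays.copyOf(input, input.length);
                }
                updated[i] = changed;
            }
        }
        return updated == null ? input : updated;
    }
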
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.rest;
 
-import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.CheckedConsumer;
@@ -229,6 +228,30 @@ public abstract class RestRequest implements ToXContent.Params {
         return params;
     }
 
+    /**
+     * A parser for the contents of this request if there is a body, otherwise throws an {@link ElasticsearchParseException}. Use
+     * {@link #applyContentParser(CheckedConsumer)} if you want to gracefully handle when the request doesn't have any contents. Use
+     * {@link #contentOrSourceParamParser()} for requests that support specifying the request body in the {@code source} param.
+     */
+    public final XContentParser contentParser() throws IOException {
+        BytesReference content = content();
+        if (content.length() == 0) {
+            throw new ElasticsearchParseException("Body required");
+        }
+        return XContentFactory.xContent(content).createParser(content);
+    }
+
+    /**
+     * If there is any content then call {@code applyParser} with the parser, otherwise do nothing.
+     */
+    public final void applyContentParser(CheckedConsumer<XContentParser, IOException> applyParser) throws IOException {
+        if (hasContent()) {
+            try (XContentParser parser = contentParser()) {
+                applyParser.accept(parser);
+            }
+        }
+    }
+
     /**
      * Does this request have content or a {@code source} parameter? Use this instead of {@link #hasContent()} if this
      * {@linkplain RestHandler} treats the {@code source} parameter like the body content.
@@ -256,16 +279,13 @@ public abstract class RestRequest implements ToXContent.Params {
      * back to the user when there isn't request content.
      */
     public final void withContentOrSourceParamParserOrNull(CheckedConsumer<XContentParser, IOException> withParser) throws IOException {
-        XContentParser parser = null;
         BytesReference content = contentOrSourceParam();
         if (content.length() > 0) {
-            parser = XContentFactory.xContent(content).createParser(content);
-        }
-
-        try {
-            withParser.accept(parser);
-        } finally {
-            IOUtils.close(parser);
+            try (XContentParser parser = XContentFactory.xContent(content).createParser(content)) {
+                withParser.accept(parser);
+            }
+        } else {
+            withParser.accept(null);
         }
     }
 

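The handler diffs that follow all reduce to one of two shapes (request/parser calls are from this diff; myRequest is hypothetical):

    // body required: contentParser() throws ElasticsearchParseException("Body required") when empty
    try (XContentParser parser = request.contentParser()) {
        myRequest.source(parser.mapOrdered());
    }

    // body optional: the consumer simply never runs when there is no content
    request.applyContentParser(parser -> myRequest.source(parser.mapOrdered()));
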
@@ -121,11 +121,7 @@ public class RestClusterRerouteAction extends BaseRestHandler {
         clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout()));
         clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed()));
         clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout()));
-        if (request.hasContent()) {
-            try (XContentParser parser = XContentHelper.createParser(request.content())) {
-                PARSER.parse(parser, clusterRerouteRequest, new ParseContext(registry, parseFieldMatcher));
-            }
-        }
+        request.applyContentParser(parser -> PARSER.parse(parser, clusterRerouteRequest, new ParseContext(registry, parseFieldMatcher)));
         return clusterRerouteRequest;
     }
 

@@ -26,7 +26,6 @@ import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
@@ -52,7 +51,7 @@ public class RestClusterUpdateSettingsAction extends BaseRestHandler {
         clusterUpdateSettingsRequest.masterNodeTimeout(
                 request.paramAsTime("master_timeout", clusterUpdateSettingsRequest.masterNodeTimeout()));
         Map<String, Object> source;
-        try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) {
+        try (XContentParser parser = request.contentParser()) {
             source = parser.map();
         }
         if (source.containsKey("transient")) {

@@ -49,7 +49,7 @@ public class RestCreateSnapshotAction extends BaseRestHandler {
     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
         CreateSnapshotRequest createSnapshotRequest = createSnapshotRequest(request.param("repository"), request.param("snapshot"));
-        createSnapshotRequest.source(request.content().utf8ToString());
+        request.applyContentParser(p -> createSnapshotRequest.source(p.mapOrdered()));
         createSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createSnapshotRequest.masterNodeTimeout()));
         createSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
         return channel -> client.admin().cluster().createSnapshot(createSnapshotRequest, new RestToXContentListener<>(channel));

@@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
@@ -50,7 +51,9 @@ public class RestPutRepositoryAction extends BaseRestHandler {
     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
         PutRepositoryRequest putRepositoryRequest = putRepositoryRequest(request.param("repository"));
-        putRepositoryRequest.source(request.content().utf8ToString());
+        try (XContentParser parser = request.contentParser()) {
+            putRepositoryRequest.source(parser.mapOrdered());
+        }
         putRepositoryRequest.verify(request.paramAsBoolean("verify", true));
         putRepositoryRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRepositoryRequest.masterNodeTimeout()));
         putRepositoryRequest.timeout(request.paramAsTime("timeout", putRepositoryRequest.timeout()));

@@ -49,7 +49,7 @@ public class RestRestoreSnapshotAction extends BaseRestHandler {
         RestoreSnapshotRequest restoreSnapshotRequest = restoreSnapshotRequest(request.param("repository"), request.param("snapshot"));
         restoreSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", restoreSnapshotRequest.masterNodeTimeout()));
         restoreSnapshotRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false));
-        restoreSnapshotRequest.source(request.content().utf8ToString());
+        request.applyContentParser(p -> restoreSnapshotRequest.source(p.mapOrdered()));
         return channel -> client.admin().cluster().restoreSnapshot(restoreSnapshotRequest, new RestToXContentListener<>(channel));
     }
 }

@@ -41,7 +41,6 @@ public class RestCreateIndexAction extends BaseRestHandler {
         controller.registerHandler(RestRequest.Method.PUT, "/{index}", this);
     }
 
-    @SuppressWarnings({"unchecked"})
     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
         CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index"));

@@ -67,7 +67,7 @@ public class RestIndexPutAliasAction extends BaseRestHandler {
         String searchRouting = null;
 
         if (request.hasContent()) {
-            try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) {
+            try (XContentParser parser = request.contentParser()) {
                 XContentParser.Token token = parser.nextToken();
                 if (token == null) {
                     throw new IllegalArgumentException("No index alias is specified");

@@ -21,7 +21,6 @@ package org.elasticsearch.rest.action.admin.indices;
 
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
-import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
@@ -29,7 +28,6 @@ import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
@@ -61,7 +59,7 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
         IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
         indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout()));
         indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout()));
-        try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) {
+        try (XContentParser parser = request.contentParser()) {
             PARSER.parse(parser, indicesAliasesRequest, () -> ParseFieldMatcher.STRICT);
         }
         if (indicesAliasesRequest.getAliasActions().isEmpty()) {

@@ -22,6 +22,7 @@ package org.elasticsearch.rest.action.admin.indices;
 import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
 import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.client.node.NodeClient;
+import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.BaseRestHandler;
@@ -40,13 +41,10 @@ public class RestRolloverIndexAction extends BaseRestHandler {
         controller.registerHandler(RestRequest.Method.POST, "/{index}/_rollover/{new_index}", this);
     }
 
-    @SuppressWarnings({"unchecked"})
     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
         RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index"));
-        if (request.hasContent()) {
-            rolloverIndexRequest.source(request.content());
-        }
+        request.applyContentParser(parser -> RolloverRequest.PARSER.parse(parser, rolloverIndexRequest, () -> ParseFieldMatcher.EMPTY));
         rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false));
         rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout()));
         rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout()));

@@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.indices.shrink.ShrinkRequest;
import org.elasticsearch.action.admin.indices.shrink.ShrinkResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -42,7 +43,6 @@ public class RestShrinkIndexAction extends BaseRestHandler {
        controller.registerHandler(RestRequest.Method.POST, "/{index}/_shrink/{target}", this);
    }

    @SuppressWarnings({"unchecked"})
    @Override
    public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
        if (request.param("target") == null) {
@@ -52,9 +52,7 @@ public class RestShrinkIndexAction extends BaseRestHandler {
            throw new IllegalArgumentException("no source index");
        }
        ShrinkRequest shrinkIndexRequest = new ShrinkRequest(request.param("target"), request.param("index"));
        if (request.hasContent()) {
            shrinkIndexRequest.source(request.content());
        }
        request.applyContentParser(parser -> ShrinkRequest.PARSER.parse(parser, shrinkIndexRequest, () -> ParseFieldMatcher.EMPTY));
        shrinkIndexRequest.timeout(request.paramAsTime("timeout", shrinkIndexRequest.timeout()));
        shrinkIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", shrinkIndexRequest.masterNodeTimeout()));
        shrinkIndexRequest.setWaitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards")));
@@ -80,9 +80,8 @@ public class RestUpdateAction extends BaseRestHandler {
        updateRequest.versionType(VersionType.fromString(request.param("version_type"), updateRequest.versionType()));

        // see if we have it in the body
        if (request.hasContent()) {
            updateRequest.fromXContent(request.content());
        request.applyContentParser(parser -> {
            updateRequest.fromXContent(parser);
            IndexRequest upsertRequest = updateRequest.upsertRequest();
            if (upsertRequest != null) {
                upsertRequest.routing(request.param("routing"));
@@ -97,7 +96,7 @@ public class RestUpdateAction extends BaseRestHandler {
                doc.version(RestActions.parseVersion(request));
                doc.versionType(VersionType.fromString(request.param("version_type"), doc.versionType()));
            }
        }
        });

        return channel ->
                client.update(updateRequest, new RestStatusToXContentListener<>(channel, r -> r.getLocation(updateRequest.routing())));
@@ -56,10 +56,14 @@ public class RestSuggestAction extends BaseRestHandler {
                             SearchRequestParsers searchRequestParsers) {
        super(settings);
        this.searchRequestParsers = searchRequestParsers;
        controller.registerHandler(POST, "/_suggest", this);
        controller.registerHandler(GET, "/_suggest", this);
        controller.registerHandler(POST, "/{index}/_suggest", this);
        controller.registerHandler(GET, "/{index}/_suggest", this);
        controller.registerAsDeprecatedHandler(POST, "/_suggest", this,
            "[POST /_suggest] is deprecated! Use [POST /_search] instead.", deprecationLogger);
        controller.registerAsDeprecatedHandler(GET, "/_suggest", this,
            "[GET /_suggest] is deprecated! Use [GET /_search] instead.", deprecationLogger);
        controller.registerAsDeprecatedHandler(POST, "/{index}/_suggest", this,
            "[POST /{index}/_suggest] is deprecated! Use [POST /{index}/_search] instead.", deprecationLogger);
        controller.registerAsDeprecatedHandler(GET, "/{index}/_suggest", this,
            "[GET /{index}/_suggest] is deprecated! Use [GET /{index}/_search] instead.", deprecationLogger);
    }

    @Override
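The `_suggest` routes above are re-registered through `registerAsDeprecatedHandler`, which warns before delegating to the normal handler. A rough sketch of the wrap-and-warn idea, with hypothetical names (the real RestController plumbing is more involved):

---------------------------------------------------------------------------
import org.apache.logging.log4j.Logger;

final class DeprecationWrapper {
    interface RestHandler {
        void handleRequest() throws Exception;
    }

    /** Wraps a handler so each call logs the deprecation message first. */
    static RestHandler asDeprecated(RestHandler handler, String message, Logger deprecationLogger) {
        return () -> {
            deprecationLogger.warn(message);  // warn once per request, then delegate
            handler.handleRequest();
        };
    }
}
---------------------------------------------------------------------------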
@@ -20,8 +20,6 @@ package org.elasticsearch.transport;

import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet;
import com.carrotsearch.hppc.LongObjectHashMap;
import com.carrotsearch.hppc.LongObjectMap;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.util.IOUtils;
@@ -88,7 +86,9 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -180,7 +180,7 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
    private final String transportName;
    protected final ConnectionProfile defaultConnectionProfile;

    private final LongObjectMap<TransportResponseHandler<?>> pendingHandshakes = new LongObjectHashMap<>();
    private final ConcurrentMap<Long, HandshakeResponseHandler> pendingHandshakes = new ConcurrentHashMap<>();
    private final AtomicLong requestIdGenerator = new AtomicLong();
    private final CounterMetric numHandshakes = new CounterMetric();
    private static final String HANDSHAKE_ACTION_NAME = "internal:tcp/handshake";
@@ -242,6 +242,51 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
        this.transportServiceAdapter = service;
    }

    private static class HandshakeResponseHandler<Channel> implements TransportResponseHandler<VersionHandshakeResponse> {
        final AtomicReference<Version> versionRef = new AtomicReference<>();
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicBoolean handshakeNotSupported = new AtomicBoolean(false);
        final AtomicReference<Exception> exceptionRef = new AtomicReference<>();
        final Channel channel;

        public HandshakeResponseHandler(Channel channel) {
            this.channel = channel;
        }

        @Override
        public VersionHandshakeResponse newInstance() {
            return new VersionHandshakeResponse();
        }

        @Override
        public void handleResponse(VersionHandshakeResponse response) {
            final boolean success = versionRef.compareAndSet(null, response.version);
            assert success;
            latch.countDown();
        }

        @Override
        public void handleException(TransportException exp) {
            Throwable cause = exp.getCause();
            if (cause != null
                && cause instanceof ActionNotFoundTransportException
                // this will happen if we talk to a node (pre 5.2) that doesn't have a handshake handler
                // we will just treat the node as a 5.0.0 node unless the discovery node that is used to connect has a higher version.
                && cause.getMessage().equals("No handler for action [internal:tcp/handshake]")) {
                handshakeNotSupported.set(true);
            } else {
                final boolean success = exceptionRef.compareAndSet(null, exp);
                assert success;
            }
            latch.countDown();
        }

        @Override
        public String executor() {
            return ThreadPool.Names.SAME;
        }
    }

    public class ScheduledPing extends AbstractLifecycleRunnable {

        /**
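Extracting the anonymous handler into the named `HandshakeResponseHandler` above is what lets the transport later find and fail a handler by channel. The latch-plus-compareAndSet rendezvous it uses can be shown in isolation; this standalone sketch uses illustrative names, not the ES API:

---------------------------------------------------------------------------
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

final class OneShotResult<T> {
    private final AtomicReference<T> result = new AtomicReference<>();
    private final AtomicReference<Exception> failure = new AtomicReference<>();
    private final CountDownLatch latch = new CountDownLatch(1);

    // called from the network thread, at most once
    void onResponse(T value) {
        boolean first = result.compareAndSet(null, value);
        assert first : "response delivered twice";
        latch.countDown();
    }

    void onFailure(Exception e) {
        failure.compareAndSet(null, e);
        latch.countDown();
    }

    // called from the connecting thread; mirrors executeHandshake's timeout handling
    T waitFor(long timeoutMillis) throws Exception {
        if (latch.await(timeoutMillis, TimeUnit.MILLISECONDS) == false) {
            throw new IllegalStateException("timed out waiting for response");
        }
        if (failure.get() != null) {
            throw failure.get();
        }
        return result.get();
    }
}
---------------------------------------------------------------------------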
@@ -462,9 +507,17 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i

    @Override
    public final NodeChannels openConnection(DiscoveryNode node, ConnectionProfile profile) throws IOException {
        NodeChannels nodeChannels = connectToChannels(node, profile);
        transportServiceAdapter.onConnectionOpened(node);
        return nodeChannels;
        try {
            NodeChannels nodeChannels = connectToChannels(node, profile);
            transportServiceAdapter.onConnectionOpened(node);
            return nodeChannels;
        } catch (ConnectTransportException e) {
            throw e;
        } catch (Exception e) {
            // ConnectTransportExceptions are handled specifically on the caller end - we wrap the actual exception to ensure
            // only relevant exceptions are logged on the caller end.. this is the same as in connectToNode
            throw new ConnectTransportException(node, "general node connection failure", e);
        }
    }

    /**
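The `openConnection` change narrows what callers can see: a known `ConnectTransportException` passes through untouched, anything else is wrapped so call sites handle and log a single exception type. The same rethrow-known/wrap-unknown pattern in a generic sketch (`DomainException` stands in for `ConnectTransportException`):

---------------------------------------------------------------------------
final class Connector {
    static class DomainException extends RuntimeException {
        DomainException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    interface Dialer {
        AutoCloseable dial() throws Exception;
    }

    static AutoCloseable open(Dialer dialer) {
        try {
            return dialer.dial();
        } catch (DomainException e) {
            throw e;                       // already the type callers handle
        } catch (Exception e) {
            // normalize everything else so call sites log a single exception type
            throw new DomainException("general connection failure", e);
        }
    }
}
---------------------------------------------------------------------------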
@@ -1466,47 +1519,12 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i

    // pkg private for testing
    final Version executeHandshake(DiscoveryNode node, Channel channel, TimeValue timeout) throws IOException, InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<Version> versionRef = new AtomicReference<>();
        AtomicReference<Exception> exceptionRef = new AtomicReference<>();
        AtomicBoolean handshakeNotSupported = new AtomicBoolean(false);
        numHandshakes.inc();
        final long requestId = newRequestId();
        pendingHandshakes.put(requestId, new TransportResponseHandler<VersionHandshakeResponse>() {

            @Override
            public VersionHandshakeResponse newInstance() {
                return new VersionHandshakeResponse();
            }

            @Override
            public void handleResponse(VersionHandshakeResponse response) {
                final boolean success = versionRef.compareAndSet(null, response.version);
                assert success;
                latch.countDown();
            }

            @Override
            public void handleException(TransportException exp) {
                Throwable cause = exp.getCause();
                if (cause != null
                    && cause instanceof ActionNotFoundTransportException
                    // this will happen if we talk to a node (pre 5.2) that doesn't haven a handshake handler
                    // we will just treat the node as a 5.0.0 node unless the discovery node that is used to connect has a higher version.
                    && cause.getMessage().equals("No handler for action [internal:tcp/handshake]")) {
                    handshakeNotSupported.set(true);
                } else {
                    final boolean success = exceptionRef.compareAndSet(null, exp);
                    assert success;
                }
                latch.countDown();
            }

            @Override
            public String executor() {
                return ThreadPool.Names.SAME;
            }
        });
        final HandshakeResponseHandler handler = new HandshakeResponseHandler(channel);
        AtomicReference<Version> versionRef = handler.versionRef;
        AtomicReference<Exception> exceptionRef = handler.exceptionRef;
        pendingHandshakes.put(requestId, handler);
        boolean success = false;
        try {
            // for the request we use the minCompatVersion since we don't know what's the version of the node we talk to
@@ -1515,11 +1533,11 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
            final Version minCompatVersion = getCurrentVersion().minimumCompatibilityVersion();
            sendRequestToChannel(node, channel, requestId, HANDSHAKE_ACTION_NAME, TransportRequest.Empty.INSTANCE,
                TransportRequestOptions.EMPTY, minCompatVersion, TransportStatus.setHandshake((byte)0));
            if (latch.await(timeout.millis(), TimeUnit.MILLISECONDS) == false) {
            if (handler.latch.await(timeout.millis(), TimeUnit.MILLISECONDS) == false) {
                throw new ConnectTransportException(node, "handshake_timeout[" + timeout + "]");
            }
            success = true;
            if (handshakeNotSupported.get()) {
            if (handler.handshakeNotSupported.get()) {
                // this is a BWC layer, if we talk to a pre 5.2 node then the handshake is not supported
                // this will go away in master once it's all ported to 5.2 but for now we keep this to make
                // the backport straight forward
@@ -1555,4 +1573,18 @@ public abstract class TcpTransport<Channel> extends AbstractLifecycleComponent i
    public long newRequestId() {
        return requestIdGenerator.incrementAndGet();
    }

    /**
     * Called by sub-classes for each channel that is closed
     */
    protected final void onChannelClosed(Channel channel) {
        Optional<Map.Entry<Long, HandshakeResponseHandler>> first = pendingHandshakes.entrySet().stream()
            .filter((entry) -> entry.getValue().channel == channel).findFirst();
        if (first.isPresent()) {
            final Long requestId = first.get().getKey();
            HandshakeResponseHandler handler = first.get().getValue();
            pendingHandshakes.remove(requestId);
            handler.handleException(new TransportException("connection reset"));
        }
    }
}
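`onChannelClosed` is the payoff of storing the channel on the handler: when a connection drops, the matching pending handshake is failed immediately instead of waiting out its timeout. A hedged sketch of that lookup-and-fail bookkeeping over a plain concurrent map (names are illustrative):

---------------------------------------------------------------------------
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

final class PendingOps<Channel> {
    interface Pending<Channel> {
        Channel channel();

        void fail(Exception e);
    }

    private final ConcurrentMap<Long, Pending<Channel>> pending = new ConcurrentHashMap<>();

    void register(long requestId, Pending<Channel> op) {
        pending.put(requestId, op);
    }

    void onChannelClosed(Channel channel) {
        // find the one operation bound to the closed channel and fail it eagerly
        Optional<Map.Entry<Long, Pending<Channel>>> first = pending.entrySet().stream()
            .filter(entry -> entry.getValue().channel() == channel).findFirst();
        if (first.isPresent()) {
            pending.remove(first.get().getKey());
            first.get().getValue().fail(new IllegalStateException("connection reset"));
        }
    }
}
---------------------------------------------------------------------------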
@@ -0,0 +1,264 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch;

import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.index.shard.IndexShardRecoveringException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matcher;

import java.io.IOException;
import java.util.Collections;

import static java.util.Collections.singleton;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.Matchers.hasSize;

public class ElasticsearchExceptionTests extends ESTestCase {

    public void testToXContent() throws IOException {
        ElasticsearchException e = new ElasticsearchException("test");
        assertExceptionAsJson(e, false, equalTo("{\"type\":\"exception\",\"reason\":\"test\"}"));

        e = new IndexShardRecoveringException(new ShardId("_test", "_0", 5));
        assertExceptionAsJson(e, false, equalTo("{\"type\":\"index_shard_recovering_exception\"," +
                "\"reason\":\"CurrentState[RECOVERING] Already recovering\",\"index_uuid\":\"_0\",\"shard\":\"5\",\"index\":\"_test\"}"));

        e = new BroadcastShardOperationFailedException(new ShardId("_index", "_uuid", 12), "foo", new IllegalStateException("bar"));
        assertExceptionAsJson(e, false, equalTo("{\"type\":\"illegal_state_exception\",\"reason\":\"bar\"}"));

        e = new ElasticsearchException(new IllegalArgumentException("foo"));
        assertExceptionAsJson(e, false, equalTo("{\"type\":\"exception\",\"reason\":\"java.lang.IllegalArgumentException: foo\"," +
                "\"caused_by\":{\"type\":\"illegal_argument_exception\",\"reason\":\"foo\"}}"));

        e = new ElasticsearchException("foo", new IllegalStateException("bar"));
        assertExceptionAsJson(e, false, equalTo("{\"type\":\"exception\",\"reason\":\"foo\"," +
                "\"caused_by\":{\"type\":\"illegal_state_exception\",\"reason\":\"bar\"}}"));

        // Test the same exception but with the "rest.exception.stacktrace.skip" parameter disabled: the stack_trace must be present
        // in the JSON. Since the stack can be large, it only checks the beginning of the JSON.
        assertExceptionAsJson(e, true, startsWith("{\"type\":\"exception\",\"reason\":\"foo\"," +
                "\"caused_by\":{\"type\":\"illegal_state_exception\",\"reason\":\"bar\"," +
                "\"stack_trace\":\"java.lang.IllegalStateException: bar"));
    }

    public void testToXContentWithHeaders() throws IOException {
        ElasticsearchException e = new ElasticsearchException("foo",
            new ElasticsearchException("bar",
                new ElasticsearchException("baz",
                    new ClusterBlockException(singleton(DiscoverySettings.NO_MASTER_BLOCK_WRITES)))));
        e.addHeader("foo_0", "0");
        e.addHeader("foo_1", "1");
        e.addHeader("es.header_foo_0", "foo_0");
        e.addHeader("es.header_foo_1", "foo_1");

        final String expectedJson = "{"
            + "\"type\":\"exception\","
            + "\"reason\":\"foo\","
            + "\"header_foo_0\":\"foo_0\","
            + "\"header_foo_1\":\"foo_1\","
            + "\"caused_by\":{"
                + "\"type\":\"exception\","
                + "\"reason\":\"bar\","
                + "\"caused_by\":{"
                    + "\"type\":\"exception\","
                    + "\"reason\":\"baz\","
                    + "\"caused_by\":{"
                        + "\"type\":\"cluster_block_exception\","
                        + "\"reason\":\"blocked by: [SERVICE_UNAVAILABLE/2/no master];\""
                    + "}"
                + "}"
            + "},"
            + "\"header\":{"
                + "\"foo_0\":\"0\","
                + "\"foo_1\":\"1\""
            + "}"
            + "}";

        assertExceptionAsJson(e, false, equalTo(expectedJson));

        ElasticsearchException parsed;
        try (XContentParser parser = XContentType.JSON.xContent().createParser(expectedJson)) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            parsed = ElasticsearchException.fromXContent(parser);
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertNull(parser.nextToken());
        }

        assertNotNull(parsed);
        assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]");
        assertThat(parsed.getHeaderKeys(), hasSize(4));
        assertEquals(parsed.getHeader("header_foo_0").get(0), "foo_0");
        assertEquals(parsed.getHeader("header_foo_1").get(0), "foo_1");
        assertEquals(parsed.getHeader("foo_0").get(0), "0");
        assertEquals(parsed.getHeader("foo_1").get(0), "1");

        ElasticsearchException cause = (ElasticsearchException) parsed.getCause();
        assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=bar]");

        cause = (ElasticsearchException) cause.getCause();
        assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=baz]");

        cause = (ElasticsearchException) cause.getCause();
        assertEquals(cause.getMessage(),
            "Elasticsearch exception [type=cluster_block_exception, reason=blocked by: [SERVICE_UNAVAILABLE/2/no master];]");
    }

    public void testFromXContent() throws IOException {
        final XContent xContent = randomFrom(XContentType.values()).xContent();
        XContentBuilder builder = XContentBuilder.builder(xContent)
            .startObject()
                .field("type", "foo")
                .field("reason", "something went wrong")
                .field("stack_trace", "...")
            .endObject();

        ElasticsearchException parsed;
        try (XContentParser parser = xContent.createParser(builder.bytes())) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            parsed = ElasticsearchException.fromXContent(parser);
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertNull(parser.nextToken());
        }

        assertNotNull(parsed);
        assertEquals(parsed.getMessage(), "Elasticsearch exception [type=foo, reason=something went wrong, stack_trace=...]");
    }

    public void testFromXContentWithCause() throws IOException {
        ElasticsearchException e = new ElasticsearchException("foo",
            new ElasticsearchException("bar",
                new ElasticsearchException("baz",
                    new RoutingMissingException("_test", "_type", "_id"))));

        final XContent xContent = randomFrom(XContentType.values()).xContent();
        XContentBuilder builder = XContentBuilder.builder(xContent).startObject().value(e).endObject();

        ElasticsearchException parsed;
        try (XContentParser parser = xContent.createParser(builder.bytes())) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            parsed = ElasticsearchException.fromXContent(parser);
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertNull(parser.nextToken());
        }

        assertNotNull(parsed);
        assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]");

        ElasticsearchException cause = (ElasticsearchException) parsed.getCause();
        assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=bar]");

        cause = (ElasticsearchException) cause.getCause();
        assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=baz]");

        cause = (ElasticsearchException) cause.getCause();
        assertEquals(cause.getMessage(),
            "Elasticsearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_type]/[_id]]");
        assertThat(cause.getHeaderKeys(), hasSize(2));
        assertThat(cause.getHeader("index"), hasItem("_test"));
        assertThat(cause.getHeader("index_uuid"), hasItem("_na_"));
    }

    public void testFromXContentWithHeaders() throws IOException {
        RoutingMissingException routing = new RoutingMissingException("_test", "_type", "_id");
        ElasticsearchException baz = new ElasticsearchException("baz", routing);
        baz.addHeader("baz_0", "baz0");
        baz.addHeader("es.baz_1", "baz1");
        baz.addHeader("baz_2", "baz2");
        baz.addHeader("es.baz_3", "baz3");
        ElasticsearchException bar = new ElasticsearchException("bar", baz);
        bar.addHeader("es.bar_0", "bar0");
        bar.addHeader("bar_1", "bar1");
        bar.addHeader("es.bar_2", "bar2");
        ElasticsearchException foo = new ElasticsearchException("foo", bar);
        foo.addHeader("es.foo_0", "foo0");
        foo.addHeader("foo_1", "foo1");

        final XContent xContent = randomFrom(XContentType.values()).xContent();
        XContentBuilder builder = XContentBuilder.builder(xContent).startObject().value(foo).endObject();

        ElasticsearchException parsed;
        try (XContentParser parser = xContent.createParser(builder.bytes())) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            parsed = ElasticsearchException.fromXContent(parser);
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertNull(parser.nextToken());
        }

        assertNotNull(parsed);
        assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]");
        assertThat(parsed.getHeaderKeys(), hasSize(2));
        assertThat(parsed.getHeader("foo_0"), hasItem("foo0"));
        assertThat(parsed.getHeader("foo_1"), hasItem("foo1"));

        ElasticsearchException cause = (ElasticsearchException) parsed.getCause();
        assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=bar]");
        assertThat(cause.getHeaderKeys(), hasSize(3));
        assertThat(cause.getHeader("bar_0"), hasItem("bar0"));
        assertThat(cause.getHeader("bar_1"), hasItem("bar1"));
        assertThat(cause.getHeader("bar_2"), hasItem("bar2"));

        cause = (ElasticsearchException) cause.getCause();
        assertEquals(cause.getMessage(), "Elasticsearch exception [type=exception, reason=baz]");
        assertThat(cause.getHeaderKeys(), hasSize(4));
        assertThat(cause.getHeader("baz_0"), hasItem("baz0"));
        assertThat(cause.getHeader("baz_1"), hasItem("baz1"));
        assertThat(cause.getHeader("baz_2"), hasItem("baz2"));
        assertThat(cause.getHeader("baz_3"), hasItem("baz3"));

        cause = (ElasticsearchException) cause.getCause();
        assertEquals(cause.getMessage(),
            "Elasticsearch exception [type=routing_missing_exception, reason=routing is required for [_test]/[_type]/[_id]]");
        assertThat(cause.getHeaderKeys(), hasSize(2));
        assertThat(cause.getHeader("index"), hasItem("_test"));
        assertThat(cause.getHeader("index_uuid"), hasItem("_na_"));
    }

    /**
     * Builds a {@link ToXContent} using a JSON XContentBuilder and check the resulting string with the given {@link Matcher}.
     *
     * By default, the stack trace of the exception is not rendered. The parameter `errorTrace` forces the stack trace to
     * be rendered like the REST API does when the "error_trace" parameter is set to true.
     */
    private static void assertExceptionAsJson(ElasticsearchException e, boolean errorTrace, Matcher<String> expected)
            throws IOException {
        ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        if (errorTrace) {
            params = new ToXContent.MapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"));
        }
        try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
            builder.startObject();
            e.toXContent(builder, params);
            builder.endObject();
            assertThat(builder.bytes().utf8ToString(), expected);
        }
    }
}
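These tests round-trip exceptions through `toXContent`/`fromXContent`, but the parser itself is outside this diff. A hedged sketch of the shape such a parser takes, reconstructed from the messages the tests expect; the real `ElasticsearchException.fromXContent` handles more fields and validation:

---------------------------------------------------------------------------
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.xcontent.XContentParser;

final class ExceptionParsingSketch {
    /** Expects the parser to be positioned on the START_OBJECT of the exception. */
    static ElasticsearchException fromXContentSketch(XContentParser parser) throws IOException {
        String type = null;
        String reason = null;
        String stackTrace = null;
        ElasticsearchException cause = null;
        Map<String, String> headers = new HashMap<>();

        XContentParser.Token token = parser.nextToken();
        while (token == XContentParser.Token.FIELD_NAME) {
            String field = parser.currentName();
            token = parser.nextToken();
            if ("type".equals(field)) {
                type = parser.text();
            } else if ("reason".equals(field)) {
                reason = parser.text();
            } else if ("stack_trace".equals(field)) {
                stackTrace = parser.text();
            } else if ("caused_by".equals(field)) {
                cause = fromXContentSketch(parser); // nested exception object, parsed recursively
            } else if (token.isValue()) {
                headers.put(field, parser.text());  // unknown scalar fields become headers
            } else {
                parser.skipChildren();              // skip unknown objects/arrays
            }
            token = parser.nextToken();
        }
        StringBuilder message = new StringBuilder("Elasticsearch exception [type=").append(type)
            .append(", reason=").append(reason);
        if (stackTrace != null) {
            message.append(", stack_trace=").append(stackTrace);
        }
        message.append(']');
        ElasticsearchException e = new ElasticsearchException(message.toString(), cause);
        for (Map.Entry<String, String> header : headers.entrySet()) {
            e.addHeader(header.getKey(), header.getValue());
        }
        return e;
    }
}
---------------------------------------------------------------------------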
@@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.exists;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.discovery.zen.ElectMasterService;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@@ -36,7 +37,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
public class IndicesExistsIT extends ESIntegTestCase {

    public void testIndexExistsWithBlocksInPlace() throws IOException {
        Settings settings = Settings.builder().put(GatewayService.RECOVER_AFTER_NODES_SETTING.getKey(), 99).build();
        Settings settings = Settings.builder()
            .put(GatewayService.RECOVER_AFTER_NODES_SETTING.getKey(), 99)
            .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1).build();
        String node = internalCluster().startNode(settings);

        assertThrows(client(node).admin().indices().prepareExists("test").setMasterNodeTimeout(TimeValue.timeValueSeconds(0)),
@@ -19,9 +19,11 @@

package org.elasticsearch.action.admin.indices.rollover;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESTestCase;

import java.util.Set;
@@ -39,7 +41,7 @@ public class RolloverRequestTests extends ESTestCase {
                .field("max_docs", 100)
            .endObject()
        .endObject();
        request.source(builder.bytes());
        RolloverRequest.PARSER.parse(XContentHelper.createParser(builder.bytes()), request, () -> ParseFieldMatcher.EMPTY);
        Set<Condition> conditions = request.getConditions();
        assertThat(conditions.size(), equalTo(2));
        for (Condition condition : conditions) {
@@ -80,7 +82,7 @@ public class RolloverRequestTests extends ESTestCase {
                .startObject("alias1").endObject()
            .endObject()
        .endObject();
        request.source(builder.bytes());
        RolloverRequest.PARSER.parse(XContentHelper.createParser(builder.bytes()), request, () -> ParseFieldMatcher.EMPTY);
        Set<Condition> conditions = request.getConditions();
        assertThat(conditions.size(), equalTo(2));
        assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1));
@@ -20,7 +20,7 @@
package org.elasticsearch.action.fieldstats;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;

@@ -39,7 +39,7 @@ public class FieldStatsRequestTests extends ESTestCase {
            StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/" +
                "fieldstats-index-constraints-request.json"));
        FieldStatsRequest request = new FieldStatsRequest();
        request.source(XContentFactory.xContent(data).createParser(data));
        request.source(createParser(JsonXContent.jsonXContent, data));

        assertThat(request.getFields().length, equalTo(5));
        assertThat(request.getFields()[0], equalTo("field1"));
@@ -19,10 +19,21 @@

package org.elasticsearch.action.support.replication;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.IndexShardRecoveringException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.function.Supplier;

import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.hamcrest.CoreMatchers.equalTo;

public class ReplicationResponseTests extends ESTestCase {

@@ -36,4 +47,97 @@ public class ReplicationResponseTests extends ESTestCase {
                equalTo(String.format(Locale.ROOT, "ShardInfo{total=5, successful=%d, failures=[]}", successful)));
    }

    public void testShardInfoEqualsAndHashcode() {
        EqualsHashCodeTestUtils.CopyFunction<ReplicationResponse.ShardInfo> copy = shardInfo ->
            new ReplicationResponse.ShardInfo(shardInfo.getTotal(), shardInfo.getSuccessful(), shardInfo.getFailures());

        EqualsHashCodeTestUtils.MutateFunction<ReplicationResponse.ShardInfo> mutate = shardInfo -> {
            List<Supplier<ReplicationResponse.ShardInfo>> mutations = new ArrayList<>();
            mutations.add(() ->
                new ReplicationResponse.ShardInfo(shardInfo.getTotal() + 1, shardInfo.getSuccessful(), shardInfo.getFailures()));
            mutations.add(() ->
                new ReplicationResponse.ShardInfo(shardInfo.getTotal(), shardInfo.getSuccessful() + 1, shardInfo.getFailures()));
            mutations.add(() -> {
                int nbFailures = randomIntBetween(1, 5);
                return new ReplicationResponse.ShardInfo(shardInfo.getTotal(), shardInfo.getSuccessful(), randomFailures(nbFailures));
            });
            return randomFrom(mutations).get();
        };

        checkEqualsAndHashCode(randomShardInfo(), copy, mutate);
    }

    public void testFailureEqualsAndHashcode() {
        EqualsHashCodeTestUtils.CopyFunction<ReplicationResponse.ShardInfo.Failure> copy = failure -> {
            Index index = failure.fullShardId().getIndex();
            ShardId shardId = new ShardId(index.getName(), index.getUUID(), failure.shardId());
            Exception cause = (Exception) failure.getCause();
            return new ReplicationResponse.ShardInfo.Failure(shardId, failure.nodeId(), cause, failure.status(), failure.primary());
        };

        EqualsHashCodeTestUtils.MutateFunction<ReplicationResponse.ShardInfo.Failure> mutate = failure -> {
            List<Supplier<ReplicationResponse.ShardInfo.Failure>> mutations = new ArrayList<>();

            final Index index = failure.fullShardId().getIndex();
            final ShardId randomIndex = new ShardId(randomUnicodeOfCodepointLength(5), index.getUUID(), failure.shardId());
            mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(randomIndex, failure.nodeId(), (Exception) failure.getCause(),
                failure.status(), failure.primary()));

            final ShardId randomUUID = new ShardId(index.getName(), randomUnicodeOfCodepointLength(5), failure.shardId());
            mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(randomUUID, failure.nodeId(), (Exception) failure.getCause(),
                failure.status(), failure.primary()));

            final ShardId randomShardId = new ShardId(index.getName(), index.getUUID(), failure.shardId() + randomIntBetween(1, 3));
            mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(randomShardId, failure.nodeId(), (Exception) failure.getCause(),
                failure.status(), failure.primary()));

            final String randomNode = randomUnicodeOfLength(3);
            mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), randomNode, (Exception) failure.getCause(),
                failure.status(), failure.primary()));

            final Exception randomException = randomFrom(new IllegalStateException("a"), new IllegalArgumentException("b"));
            mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), failure.nodeId(), randomException,
                failure.status(), failure.primary()));

            final RestStatus randomStatus = randomFrom(RestStatus.values());
            mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), failure.nodeId(),
                (Exception) failure.getCause(), randomStatus, failure.primary()));

            final boolean randomPrimary = !failure.primary();
            mutations.add(() -> new ReplicationResponse.ShardInfo.Failure(failure.fullShardId(), failure.nodeId(),
                (Exception) failure.getCause(), failure.status(), randomPrimary));

            return randomFrom(mutations).get();
        };

        checkEqualsAndHashCode(randomFailure(), copy, mutate);
    }

    private static ReplicationResponse.ShardInfo randomShardInfo() {
        int total = randomIntBetween(1, 10);
        int successful = randomIntBetween(0, total);
        return new ReplicationResponse.ShardInfo(total, successful, randomFailures(Math.max(0, (total - successful))));
    }

    private static ReplicationResponse.ShardInfo.Failure[] randomFailures(int nbFailures) {
        List<ReplicationResponse.ShardInfo.Failure> randomFailures = new ArrayList<>(nbFailures);
        for (int i = 0; i < nbFailures; i++) {
            randomFailures.add(randomFailure());
        }
        return randomFailures.toArray(new ReplicationResponse.ShardInfo.Failure[nbFailures]);
    }

    private static ReplicationResponse.ShardInfo.Failure randomFailure() {
        return new ReplicationResponse.ShardInfo.Failure(
            new ShardId(randomAsciiOfLength(5), randomAsciiOfLength(5), randomIntBetween(0, 5)),
            randomAsciiOfLength(3),
            randomFrom(
                new IndexShardRecoveringException(new ShardId("_test", "_0", 5)),
                new ElasticsearchException(new IllegalArgumentException("argument is wrong")),
                new RoutingMissingException("_test", "_type", "_id")
            ),
            randomFrom(RestStatus.values()),
            randomBoolean()
        );
    }
}
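Both tests lean on the copy/mutate contract of `checkEqualsAndHashCode`: a field-for-field copy must stay equal, a single mutation must break equality. A minimal sketch of what that utility asserts (the real one covers null and cross-type comparisons as well):

---------------------------------------------------------------------------
final class EqualsHashCodeSketch {
    static <T> void checkEqualsAndHashCodeSketch(T original,
                                                 java.util.function.Function<T, T> copy,
                                                 java.util.function.Function<T, T> mutate) {
        T duplicate = copy.apply(original);
        // an equal copy must compare equal both ways and share a hash code
        assert original.equals(duplicate) && duplicate.equals(original);
        assert original.hashCode() == duplicate.hashCode();
        // a single mutation must break equality
        T mutated = mutate.apply(original);
        assert original.equals(mutated) == false;
    }
}
---------------------------------------------------------------------------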
@@ -19,13 +19,11 @@

package org.elasticsearch.action.update;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.env.Environment;
@@ -36,8 +34,8 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.watcher.ResourceWatcherService;

@@ -52,17 +50,16 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

public class UpdateRequestTests extends ESTestCase {
    public void testUpdateRequest() throws Exception {
        UpdateRequest request = new UpdateRequest("test", "type", "1");
        // simple script
        request.fromXContent(XContentFactory.jsonBuilder().startObject()
            .field("script", "script1")
            .endObject());
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("script", "script1")
            .endObject().bytes()));
        Script script = request.script();
        assertThat(script, notNullValue());
        assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -72,9 +69,9 @@ public class UpdateRequestTests extends ESTestCase {
        assertThat(params, equalTo(Collections.emptyMap()));

        // simple verbose script
        request.fromXContent(XContentFactory.jsonBuilder().startObject()
            .startObject("script").field("inline", "script1").endObject()
            .endObject());
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
            .startObject("script").field("inline", "script1").endObject()
            .endObject().bytes()));
        script = request.script();
        assertThat(script, notNullValue());
        assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -85,13 +82,13 @@ public class UpdateRequestTests extends ESTestCase {

        // script with params
        request = new UpdateRequest("test", "type", "1");
        request.fromXContent(XContentFactory.jsonBuilder().startObject()
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
            .startObject("script")
                .field("inline", "script1")
                .startObject("params")
                    .field("param1", "value1")
                .endObject()
            .endObject().endObject());
            .endObject().endObject().bytes()));
        script = request.script();
        assertThat(script, notNullValue());
        assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -103,9 +100,15 @@ public class UpdateRequestTests extends ESTestCase {
        assertThat(params.get("param1").toString(), equalTo("value1"));

        request = new UpdateRequest("test", "type", "1");
        request.fromXContent(XContentFactory.jsonBuilder().startObject().startObject("script")
            .startObject("params").field("param1", "value1").endObject()
            .field("inline", "script1").endObject().endObject());
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("script")
                    .startObject("params")
                        .field("param1", "value1")
                    .endObject()
                    .field("inline", "script1")
                .endObject()
            .endObject().bytes()));
        script = request.script();
        assertThat(script, notNullValue());
        assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -118,7 +121,7 @@ public class UpdateRequestTests extends ESTestCase {

        // script with params and upsert
        request = new UpdateRequest("test", "type", "1");
        request.fromXContent(XContentFactory.jsonBuilder().startObject()
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
            .startObject("script")
                .startObject("params")
                    .field("param1", "value1")
@@ -130,7 +133,7 @@ public class UpdateRequestTests extends ESTestCase {
            .startObject("compound")
                .field("field2", "value2")
            .endObject()
            .endObject().endObject());
            .endObject().endObject().bytes()));
        script = request.script();
        assertThat(script, notNullValue());
        assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -145,7 +148,7 @@ public class UpdateRequestTests extends ESTestCase {
        assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));

        request = new UpdateRequest("test", "type", "1");
        request.fromXContent(XContentFactory.jsonBuilder().startObject()
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder().startObject()
            .startObject("upsert")
                .field("field1", "value1")
                .startObject("compound")
@@ -157,7 +160,7 @@ public class UpdateRequestTests extends ESTestCase {
                    .field("param1", "value1")
                .endObject()
                .field("inline", "script1")
            .endObject().endObject());
            .endObject().endObject().bytes()));
        script = request.script();
        assertThat(script, notNullValue());
        assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -173,69 +176,70 @@ public class UpdateRequestTests extends ESTestCase {

        // script with doc
        request = new UpdateRequest("test", "type", "1");
        request.fromXContent(XContentFactory.jsonBuilder().startObject()
            .startObject("doc").field("field1", "value1").startObject("compound")
            .field("field2", "value2").endObject().endObject().endObject());
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("doc")
                    .field("field1", "value1")
                    .startObject("compound")
                        .field("field2", "value2")
                    .endObject()
                .endObject()
            .endObject().bytes()));
        Map<String, Object> doc = request.doc().sourceAsMap();
        assertThat(doc.get("field1").toString(), equalTo("value1"));
        assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
    }

    // Related to issue #15822
    public void testInvalidBodyThrowsParseException() throws Exception {
        UpdateRequest request = new UpdateRequest("test", "type", "1");
        Exception e = expectThrows(ElasticsearchParseException.class, () -> request.fromXContent(new byte[] { (byte) '"' }));
        assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
    }

    // Related to issue 15338
    public void testFieldsParsing() throws Exception {
        UpdateRequest request = new UpdateRequest("test", "type1", "1")
            .fromXContent(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}"));
            .fromXContent(XContentHelper.createParser(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")));
        assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(request.fields(), arrayContaining("_source"));

        request = new UpdateRequest("test", "type2", "2")
            .fromXContent(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}"));
        request = new UpdateRequest("test", "type2", "2").fromXContent(
            XContentHelper.createParser(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")));
        assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2"));
        assertThat(request.fields(), arrayContaining("field1", "field2"));
    }

    public void testFetchSourceParsing() throws Exception {
        UpdateRequest request = new UpdateRequest("test", "type1", "1");
        request.fromXContent(
            XContentFactory.jsonBuilder().startObject().field("_source", true).endObject()
        );
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("_source", true)
            .endObject().bytes()));
        assertThat(request.fetchSource(), notNullValue());
        assertThat(request.fetchSource().includes().length, equalTo(0));
        assertThat(request.fetchSource().excludes().length, equalTo(0));
        assertThat(request.fetchSource().fetchSource(), equalTo(true));

        request.fromXContent(
            XContentFactory.jsonBuilder().startObject().field("_source", false).endObject()
        );
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("_source", false)
            .endObject().bytes()));
        assertThat(request.fetchSource(), notNullValue());
        assertThat(request.fetchSource().includes().length, equalTo(0));
        assertThat(request.fetchSource().excludes().length, equalTo(0));
        assertThat(request.fetchSource().fetchSource(), equalTo(false));

        request.fromXContent(
            XContentFactory.jsonBuilder().startObject().field("_source", "path.inner.*").endObject()
        );
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
            .startObject()
                .field("_source", "path.inner.*")
            .endObject().bytes()));
        assertThat(request.fetchSource(), notNullValue());
        assertThat(request.fetchSource().fetchSource(), equalTo(true));
        assertThat(request.fetchSource().includes().length, equalTo(1));
        assertThat(request.fetchSource().excludes().length, equalTo(0));
        assertThat(request.fetchSource().includes()[0], equalTo("path.inner.*"));

        request.fromXContent(
            XContentFactory.jsonBuilder().startObject()
                .startObject("_source")
                    .field("includes", "path.inner.*")
                    .field("excludes", "another.inner.*")
                .endObject()
            .endObject()
        );
        request.fromXContent(XContentHelper.createParser(XContentFactory.jsonBuilder()
            .startObject()
                .startObject("_source")
                    .field("includes", "path.inner.*")
                    .field("excludes", "another.inner.*")
                .endObject()
            .endObject().bytes()));
        assertThat(request.fetchSource(), notNullValue());
        assertThat(request.fetchSource().fetchSource(), equalTo(true));
        assertThat(request.fetchSource().includes().length, equalTo(1));
@@ -254,13 +258,17 @@ public class UpdateRequestTests extends ESTestCase {
        Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
        scripts.put("ctx._source.update_timestamp = ctx._now",
            (vars) -> {
                Map<String, Object> ctx = (Map) vars.get("ctx");
                Map<String, Object> source = (Map) ctx.get("_source");
                Map<String, Object> vars2 = vars;
                @SuppressWarnings("unchecked")
                Map<String, Object> ctx = (Map<String, Object>) vars2.get("ctx");
                @SuppressWarnings("unchecked")
                Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
                source.put("update_timestamp", ctx.get("_now"));
                return null;});
        scripts.put("ctx._timestamp = ctx._now",
            (vars) -> {
                @SuppressWarnings("unchecked")
                Map<String, Object> ctx = (Map<String, Object>) vars.get("ctx");
                ctx.put("_timestamp", ctx.get("_now"));
                return null;});
        ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
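The mechanical change running through this test class is `fromXContent(builder-or-bytes)` becoming `fromXContent(XContentHelper.createParser(bytes))`, so parser creation happens at the call site. A sketch of the helper these call sites assume, built from the same content-type sniffing the old code used inline:

---------------------------------------------------------------------------
import java.io.IOException;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

final class ParserHelperSketch {
    static XContentParser createParser(BytesReference bytes) throws IOException {
        // xContent(bytes) sniffs JSON/YAML/CBOR/SMILE from the leading bytes,
        // exactly as the removed inline XContentFactory calls did
        return XContentFactory.xContent(bytes).createParser(bytes);
    }
}
---------------------------------------------------------------------------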
@@ -268,7 +268,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase {
    public void testDynamicUpdateMinimumMasterNodes() throws Exception {
        Settings settings = Settings.builder()
            .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms")
            .put("discovery.initial_state_timeout", "500ms")
            .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), "1")
            .build();

        logger.info("--> start first node and wait for it to be a master");
@@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent;

import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -268,7 +269,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
        final byte[] randomBytes = randomBytes();
        BytesReference bytes = builder().startObject().field("binary", randomBytes).endObject().bytes();

        XContentParser parser = xcontentType().xContent().createParser(bytes);
        XContentParser parser = createParser(xcontentType().xContent(), bytes);
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "binary");
@@ -284,7 +285,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
        final byte[] randomBytes = randomBytes();
        BytesReference bytes = builder().startObject().field("binary").value(randomBytes).endObject().bytes();

        XContentParser parser = xcontentType().xContent().createParser(bytes);
        XContentParser parser = createParser(xcontentType().xContent(), bytes);
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "binary");
@@ -309,7 +310,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
        }
        builder.endObject();

        XContentParser parser = xcontentType().xContent().createParser(builder.bytes());
        XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "bin");
@@ -331,7 +332,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
        }
        builder.endObject();

        XContentParser parser = xcontentType().xContent().createParser(builder.bytes());
        XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "utf8");
@@ -349,7 +350,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
        final BytesReference random = new BytesArray(randomBytes());
        XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject();

        XContentParser parser = xcontentType().xContent().createParser(builder.bytes());
        XContentParser parser = createParser(xcontentType().xContent(), builder.bytes());
        assertSame(parser.nextToken(), Token.START_OBJECT);
        assertSame(parser.nextToken(), Token.FIELD_NAME);
        assertEquals(parser.currentName(), "text");
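The tests above now obtain parsers through an `ESTestCase.createParser(XContent, BytesReference)` helper instead of calling `xContent().createParser(bytes)` directly, giving the test framework a single choke point for every parser it hands out. One plausible reason is resource tracking; this sketch of such a helper is an assumption, not the actual ESTestCase code:

---------------------------------------------------------------------------
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentParser;

abstract class ParserTrackingTestCase {
    private final List<XContentParser> openedParsers = new ArrayList<>();

    /** Central choke point: every parser a test opens is remembered for cleanup. */
    protected XContentParser createParser(XContent xContent, BytesReference data) throws IOException {
        XContentParser parser = xContent.createParser(data);
        openedParsers.add(parser);
        return parser;
    }

    /** Would run in an @After hook in a real test base class. */
    protected void closeParsers() throws IOException {
        for (XContentParser parser : openedParsers) {
            parser.close();
        }
        openedParsers.clear();
    }
}
---------------------------------------------------------------------------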
@@ -0,0 +1,81 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class XContentParserUtilsTests extends ESTestCase {

    private XContentType xContentType;

    @Before
    public void setUp() throws Exception {
        super.setUp();
        xContentType = randomFrom(XContentType.values());
    }

    public void testEnsureFieldName() throws IOException {
        ParsingException e = expectThrows(ParsingException.class, () -> {
            XContentParser parser = createParser(createBuilder().startObject().endObject().bytes());
            // Parser current token is null
            assertNull(parser.currentToken());
            XContentParserUtils.ensureFieldName(parser.currentToken(), parser::getTokenLocation);
        });
        assertThat(e.getMessage(), equalTo("Failed to parse object: expecting token of type [FIELD_NAME] but found [null]"));

        e = expectThrows(ParsingException.class, () -> {
            XContentParser parser = createParser(createBuilder().startObject().field("foo", "bar").endObject().bytes());
            // Parser next token is a start object
            XContentParserUtils.ensureFieldName(parser.nextToken(), parser::getTokenLocation);
        });
        assertThat(e.getMessage(), equalTo("Failed to parse object: expecting token of type [FIELD_NAME] but found [START_OBJECT]"));

        e = expectThrows(ParsingException.class, () -> {
            XContentParser parser = createParser(createBuilder().startObject().field("foo", "bar").endObject().bytes());
            // Moves to start object
            assertThat(parser.nextToken(), is(XContentParser.Token.START_OBJECT));
            // Expected field name is "foo", not "test"
            XContentParserUtils.ensureFieldName(parser, parser.nextToken(), "test");
        });
        assertThat(e.getMessage(), equalTo("Failed to parse object: expecting field with name [test] but found [foo]"));

        // Everything is fine
        final String randomFieldName = randomAsciiOfLength(5);
        XContentParser parser = createParser(createBuilder().startObject().field(randomFieldName, 0).endObject().bytes());
        assertThat(parser.nextToken(), is(XContentParser.Token.START_OBJECT));
        XContentParserUtils.ensureFieldName(parser, parser.nextToken(), randomFieldName);
    }

    private XContentBuilder createBuilder() throws IOException {
        return XContentBuilder.builder(xContentType.xContent());
    }

    private XContentParser createParser(BytesReference bytes) throws IOException {
        return xContentType.xContent().createParser(bytes);
    }
}
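The helper exercised above is meant to be called from `fromXContent`-style parsing code. A minimal sketch of that pattern, assuming only the `ensureFieldName` overloads covered by the test (the `SingleFieldParser` class itself is hypothetical, for illustration):

---------------------------------------------------------------------------
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;

import java.io.IOException;

// Hypothetical helper: reads {"<expected>": "<value>"} and fails with a
// ParsingException if the field name does not match.
final class SingleFieldParser {
    static String parse(XContentParser parser, String expectedFieldName) throws IOException {
        parser.nextToken(); // advance to START_OBJECT
        // throws ParsingException("...expecting field with name [...]") on mismatch
        XContentParserUtils.ensureFieldName(parser, parser.nextToken(), expectedFieldName);
        parser.nextToken(); // advance to the field value
        return parser.text();
    }
}
---------------------------------------------------------------------------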
@@ -626,12 +626,15 @@ public class NodeJoinControllerTests extends ESTestCase {

        setState(clusterService, stateBuilder.build());

        final DiscoveryNode restartedNode = new DiscoveryNode(otherNode.getId(),
            randomBoolean() ? otherNode.getAddress() : buildNewFakeTransportAddress(), otherNode.getAttributes(),
            otherNode.getRoles(), Version.CURRENT);
        // conflict on node id or address
        final DiscoveryNode conflictingNode = randomBoolean() ?
            new DiscoveryNode(otherNode.getId(), randomBoolean() ? otherNode.getAddress() : buildNewFakeTransportAddress(),
                otherNode.getAttributes(), otherNode.getRoles(), Version.CURRENT) :
            new DiscoveryNode("conflicting_address_node", otherNode.getAddress(), otherNode.getAttributes(), otherNode.getRoles(),
                Version.CURRENT);

        nodeJoinController.startElectionContext();
        final SimpleFuture joinFuture = joinNodeAsync(restartedNode);
        final SimpleFuture joinFuture = joinNodeAsync(conflictingNode);
        final CountDownLatch elected = new CountDownLatch(1);
        nodeJoinController.waitToBeElectedAsMaster(1, TimeValue.timeValueHours(5), new NodeJoinController.ElectionCallback() {
            @Override

@@ -655,9 +658,9 @@ public class NodeJoinControllerTests extends ESTestCase {
        assertTrue(finalNodes.isLocalNodeElectedMaster());
        assertThat(finalNodes.getLocalNode(), equalTo(masterNode));
        assertThat(finalNodes.getSize(), equalTo(2));
        assertThat(finalNodes.get(restartedNode.getId()), equalTo(restartedNode));
        assertThat(finalNodes.get(conflictingNode.getId()), equalTo(conflictingNode));
        List<ShardRouting> activeShardsOnRestartedNode =
            StreamSupport.stream(finalState.getRoutingNodes().node(restartedNode.getId()).spliterator(), false)
            StreamSupport.stream(finalState.getRoutingNodes().node(conflictingNode.getId()).spliterator(), false)
                .filter(ShardRouting::active).collect(Collectors.toList());
        assertThat(activeShardsOnRestartedNode, empty());
    }

@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.zen.ElectMasterService;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;

@@ -49,20 +50,22 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
        return blocks;
    }

    public Client startNode(Settings.Builder settings) {
        String name = internalCluster().startNode(settings);
    public Client startNode(Settings.Builder settings, int minMasterNodes) {
        String name = internalCluster().startNode(
            Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes)
                .put(settings.build()));
        return internalCluster().client(name);
    }

    public void testRecoverAfterNodes() throws Exception {
        logger.info("--> start node (1)");
        Client clientNode1 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3));
        Client clientNode1 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3), 1);
        assertThat(clientNode1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start node (2)");
        Client clientNode2 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3));
        Client clientNode2 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3), 1);
        Thread.sleep(BLOCK_WAIT_TIMEOUT.millis());
        assertThat(clientNode1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),

@@ -72,7 +75,7 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start node (3)");
        Client clientNode3 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3));
        Client clientNode3 = startNode(Settings.builder().put("gateway.recover_after_nodes", 3), 1);

        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, clientNode1).isEmpty(), equalTo(true));
        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, clientNode2).isEmpty(), equalTo(true));

@@ -81,13 +84,17 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {

    public void testRecoverAfterMasterNodes() throws Exception {
        logger.info("--> start master_node (1)");
        Client master1 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
        Client master1 = startNode(Settings.builder()
            .put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false)
            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
        assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start data_node (1)");
        Client data1 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
        Client data1 = startNode(Settings.builder()
            .put("gateway.recover_after_master_nodes", 2)
            .put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
        assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

@@ -96,7 +103,9 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start data_node (2)");
        Client data2 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
        Client data2 = startNode(Settings.builder()
            .put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true)
            .put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
        assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

@@ -108,7 +117,10 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start master_node (2)");
        Client master2 = startNode(Settings.builder().put("gateway.recover_after_master_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
        Client master2 = startNode(Settings.builder()
            .put("gateway.recover_after_master_nodes", 2)
            .put(Node.NODE_DATA_SETTING.getKey(), false)
            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true));
        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true));
        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true));

@@ -117,13 +129,19 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {

    public void testRecoverAfterDataNodes() throws Exception {
        logger.info("--> start master_node (1)");
        Client master1 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
        Client master1 = startNode(Settings.builder()
            .put("gateway.recover_after_data_nodes", 2)
            .put(Node.NODE_DATA_SETTING.getKey(), false)
            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
        assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start data_node (1)");
        Client data1 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
        Client data1 = startNode(Settings.builder()
            .put("gateway.recover_after_data_nodes", 2)
            .put(Node.NODE_DATA_SETTING.getKey(), true)
            .put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
        assertThat(master1.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

@@ -132,7 +150,10 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start master_node (2)");
        Client master2 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_MASTER_SETTING.getKey(), true));
        Client master2 = startNode(Settings.builder()
            .put("gateway.recover_after_data_nodes", 2)
            .put(Node.NODE_DATA_SETTING.getKey(), false)
            .put(Node.NODE_MASTER_SETTING.getKey(), true), 1);
        assertThat(master2.admin().cluster().prepareState().setLocal(true).execute().actionGet()
            .getState().blocks().global(ClusterBlockLevel.METADATA_WRITE),
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

@@ -144,7 +165,10 @@ public class RecoverAfterNodesIT extends ESIntegTestCase {
            hasItem(GatewayService.STATE_NOT_RECOVERED_BLOCK));

        logger.info("--> start data_node (2)");
        Client data2 = startNode(Settings.builder().put("gateway.recover_after_data_nodes", 2).put(Node.NODE_DATA_SETTING.getKey(), true).put(Node.NODE_MASTER_SETTING.getKey(), false));
        Client data2 = startNode(Settings.builder()
            .put("gateway.recover_after_data_nodes", 2)
            .put(Node.NODE_DATA_SETTING.getKey(), true)
            .put(Node.NODE_MASTER_SETTING.getKey(), false), 1);
        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master1).isEmpty(), equalTo(true));
        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, master2).isEmpty(), equalTo(true));
        assertThat(waitForNoBlocksOnNode(BLOCK_WAIT_TIMEOUT, data1).isEmpty(), equalTo(true));

@@ -72,11 +72,12 @@ public class FieldTypeLookupTests extends ESTestCase {
        MockFieldMapper f = new MockFieldMapper("foo");
        MockFieldMapper f2 = new MockFieldMapper("foo");
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
        lookup = lookup.copyAndAddAll("type1", newList(f), true);
        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), true);

        assertSame(f2.fieldType(), lookup2.get("foo"));
        assertEquals(1, size(lookup2.iterator()));
        assertSame(f.fieldType(), lookup2.get("foo"));
        assertEquals(f2.fieldType(), lookup2.get("foo"));
    }

    public void testAddExistingIndexName() {

@@ -19,16 +19,6 @@

package org.elasticsearch.index.mapper;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;

@@ -39,8 +29,17 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.startsWith;

@@ -169,7 +168,6 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
        assertThat(mapperService.unmappedFieldType("string"), instanceOf(KeywordFieldType.class));
    }

    public void testMergeWithMap() throws Throwable {
        IndexService indexService1 = createIndex("index1");
        MapperService mapperService = indexService1.mapperService();

@@ -187,4 +185,34 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
            () -> mapperService.merge(mappings, false));
        assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: "));
    }

    public void testOtherDocumentMappersOnlyUpdatedWhenChangingFieldType() throws IOException {
        IndexService indexService = createIndex("test");

        CompressedXContent simpleMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject()
            .startObject("properties")
                .startObject("field")
                    .field("type", "text")
                .endObject()
            .endObject().endObject().bytes());

        indexService.mapperService().merge("type1", simpleMapping, MergeReason.MAPPING_UPDATE, true);
        DocumentMapper documentMapper = indexService.mapperService().documentMapper("type1");

        indexService.mapperService().merge("type2", simpleMapping, MergeReason.MAPPING_UPDATE, true);
        assertSame(indexService.mapperService().documentMapper("type1"), documentMapper);

        CompressedXContent normsDisabledMapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject()
            .startObject("properties")
                .startObject("field")
                    .field("type", "text")
                    .startObject("norms")
                        .field("enabled", false)
                    .endObject()
                .endObject()
            .endObject().endObject().bytes());

        indexService.mapperService().merge("type3", normsDisabledMapping, MergeReason.MAPPING_UPDATE, true);
        assertNotSame(indexService.mapperService().documentMapper("type1"), documentMapper);
    }
}

@@ -28,11 +28,30 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicReference;

import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;

public class RestRequestTests extends ESTestCase {
    public void testContentParser() throws IOException {
        Exception e = expectThrows(ElasticsearchParseException.class, () ->
            new ContentRestRequest("", emptyMap()).contentParser());
        assertEquals("Body required", e.getMessage());
        e = expectThrows(ElasticsearchParseException.class, () ->
            new ContentRestRequest("", singletonMap("source", "{}")).contentParser());
        assertEquals("Body required", e.getMessage());
        assertEquals(emptyMap(), new ContentRestRequest("{}", emptyMap()).contentParser().map());
    }

    public void testApplyContentParser() throws IOException {
        new ContentRestRequest("", emptyMap()).applyContentParser(p -> fail("Shouldn't have been called"));
        new ContentRestRequest("", singletonMap("source", "{}")).applyContentParser(p -> fail("Shouldn't have been called"));
        AtomicReference<Object> source = new AtomicReference<>();
        new ContentRestRequest("{}", emptyMap()).applyContentParser(p -> source.set(p.map()));
        assertEquals(emptyMap(), source.get());
    }

    public void testContentOrSourceParam() throws IOException {
        assertEquals(BytesArray.EMPTY, new ContentRestRequest("", emptyMap()).contentOrSourceParam());
        assertEquals(new BytesArray("stuff"), new ContentRestRequest("stuff", emptyMap()).contentOrSourceParam());

@@ -47,15 +66,6 @@ public class RestRequestTests extends ESTestCase {
        assertEquals(true, new ContentRestRequest("", singletonMap("source", "stuff")).hasContentOrSourceParam());
    }

    public void testContentOrSourceParamParserOrNull() throws IOException {
        new ContentRestRequest("", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertNull(parser));
        new ContentRestRequest("{}", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertEquals(emptyMap(), parser.map()));
        new ContentRestRequest("{}", singletonMap("source", "stuff2")).withContentOrSourceParamParserOrNull(parser ->
            assertEquals(emptyMap(), parser.map()));
        new ContentRestRequest("", singletonMap("source", "{}")).withContentOrSourceParamParserOrNull(parser ->
            assertEquals(emptyMap(), parser.map()));
    }

    public void testContentOrSourceParamParser() throws IOException {
        Exception e = expectThrows(ElasticsearchParseException.class, () ->
            new ContentRestRequest("", emptyMap()).contentOrSourceParamParser());

@@ -65,6 +75,15 @@ public class RestRequestTests extends ESTestCase {
        assertEquals(emptyMap(), new ContentRestRequest("", singletonMap("source", "{}")).contentOrSourceParamParser().map());
    }

    public void testWithContentOrSourceParamParserOrNull() throws IOException {
        new ContentRestRequest("", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertNull(parser));
        new ContentRestRequest("{}", emptyMap()).withContentOrSourceParamParserOrNull(parser -> assertEquals(emptyMap(), parser.map()));
        new ContentRestRequest("{}", singletonMap("source", "stuff2")).withContentOrSourceParamParserOrNull(parser ->
            assertEquals(emptyMap(), parser.map()));
        new ContentRestRequest("", singletonMap("source", "{}")).withContentOrSourceParamParserOrNull(parser ->
            assertEquals(emptyMap(), parser.map()));
    }

    private static final class ContentRestRequest extends RestRequest {
        private final BytesArray content;
        public ContentRestRequest(String content, Map<String, String> params) {
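These tests cover the body-access helpers on `RestRequest`. As a rough sketch of how a handler might consume them (the `ExampleBodyReader` class here is made up for illustration; the "Body required" behaviour is what the tests above assert):

---------------------------------------------------------------------------
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestRequest;

import java.io.IOException;
import java.util.Map;

// Illustrative only: pull the body either from the request content or from the
// "source" query parameter, whichever is present.
final class ExampleBodyReader {
    static Map<String, Object> read(RestRequest request) throws IOException {
        // throws ElasticsearchParseException("Body required") if neither is present
        try (XContentParser parser = request.contentOrSourceParamParser()) {
            return parser.map();
        }
    }
}
---------------------------------------------------------------------------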
@@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotR
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

@@ -76,10 +77,9 @@ public class SnapshotRequestsTests extends ESTestCase {
            builder.endArray();
        }

        byte[] bytes = BytesReference.toBytes(builder.endObject().bytes());
        BytesReference bytes = builder.endObject().bytes();

        request.source(bytes);
        request.source(XContentHelper.createParser(bytes).mapOrdered());

        assertEquals("test-repo", request.repository());
        assertEquals("test-snap", request.snapshot());

@@ -135,10 +135,9 @@ public class SnapshotRequestsTests extends ESTestCase {
            builder.endArray();
        }

        byte[] bytes = BytesReference.toBytes(builder.endObject().bytes());
        BytesReference bytes = builder.endObject().bytes();

        request.source(bytes);
        request.source(XContentHelper.createParser(bytes).mapOrdered());

        assertEquals("test-repo", request.repository());
        assertEquals("test-snap", request.snapshot());
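The same migration applies to any caller that previously handed raw bytes to `source()`: parse the bytes into an ordered map first. A sketch, assuming a builder that has already been closed with `endObject()` (the wrapper class is hypothetical):

---------------------------------------------------------------------------
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;

import java.io.IOException;

final class SnapshotSourceExample {
    static void apply(RestoreSnapshotRequest request, XContentBuilder builder) throws IOException {
        BytesReference bytes = builder.bytes();
        // instead of request.source(BytesReference.toBytes(bytes)):
        request.source(XContentHelper.createParser(bytes).mapOrdered());
    }
}
---------------------------------------------------------------------------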
@@ -19,7 +19,6 @@
package org.elasticsearch.threadpool;

import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;

@@ -27,9 +26,7 @@ import org.elasticsearch.common.unit.SizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

@@ -81,11 +78,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
        info.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();

        BytesReference bytesReference = builder.bytes();
        Map<String, Object> map;
        try (XContentParser parser = XContentFactory.xContent(bytesReference).createParser(bytesReference)) {
            map = parser.map();
        }
        Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false).v2();
        assertThat(map, hasKey("foo"));
        map = (Map<String, Object>) map.get("foo");
        assertThat(map, hasKey("queue_size"));
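`XContentHelper.convertToMap` collapses the builder-parse-map dance into a single call; it returns a `Tuple` whose first element is the detected content type and whose second is the map, hence the `.v2()`. A minimal sketch of the pattern (the wrapper class is illustrative only):

---------------------------------------------------------------------------
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;

import java.util.Map;

final class ConvertToMapExample {
    static Map<String, Object> toMap(BytesReference bytes) {
        // false = do not preserve field order; v1() is the detected XContentType
        Tuple<XContentType, Map<String, Object>> tuple = XContentHelper.convertToMap(bytes, false);
        return tuple.v2();
    }
}
---------------------------------------------------------------------------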
@@ -6,7 +6,7 @@ See: https://github.com/elastic/docs
Snippets marked with `// CONSOLE` are automatically annotated with "VIEW IN
SENSE" in the documentation and are automatically tested by the command
`gradle :docs:check`. To test just the docs from a single page, use e.g.
`gradle :docs:check -Dtest.method=*rollover*`.
`gradle :docs:check -Dtests.method=*rollover*`.

By default `// CONSOLE` snippet runs as its own isolated
test. You can manipulate the test execution in the following ways:

@@ -5,8 +5,12 @@ The suggest feature suggests similar looking terms based on a provided
text by using a suggester. Parts of the suggest feature are still under
development.

The suggest request part is either defined alongside the query part in a
`_search` request or via the REST `_suggest` endpoint.
The suggest request part is defined alongside the query part in a `_search`
request.

NOTE: `_suggest` endpoint has been deprecated in favour of using suggest via
`_search` endpoint. In 5.0, the `_search` endpoint has been optimized for
suggest only search requests.

[source,js]
--------------------------------------------------

@@ -30,25 +34,6 @@ POST twitter/_search
// CONSOLE
// TEST[setup:twitter]

Suggest requests executed against the `_suggest` endpoint should omit
the surrounding `suggest` element which is only used if the suggest
request is part of a search.

[source,js]
--------------------------------------------------
POST _suggest
{
  "my-suggestion" : {
    "text" : "tring out Elasticsearch",
    "term" : {
      "field" : "message"
    }
  }
}
--------------------------------------------------
// CONSOLE
// TEST[setup:twitter]

Several suggestions can be specified per request. Each suggestion is
identified with an arbitrary name. In the example below two suggestions
are requested. Both `my-suggest-1` and `my-suggest-2` suggestions use

@@ -56,18 +41,20 @@ the `term` suggester, but have a different `text`.

[source,js]
--------------------------------------------------
POST _suggest
POST _search
{
  "my-suggest-1" : {
    "text" : "tring out Elasticsearch",
    "term" : {
      "field" : "message"
    }
  },
  "my-suggest-2" : {
    "text" : "kmichy",
    "term" : {
      "field" : "user"
  "suggest": {
    "my-suggest-1" : {
      "text" : "tring out Elasticsearch",
      "term" : {
        "field" : "message"
      }
    },
    "my-suggest-2" : {
      "text" : "kmichy",
      "term" : {
        "field" : "user"
      }
    }
  }
}

@@ -85,27 +72,34 @@ in the suggest text and if found an arbitrary number of options.
--------------------------------------------------
{
  "_shards": ...
  "my-suggest-1": [ {
    "text": "tring",
    "offset": 0,
    "length": 5,
    "options": [ {"text": "trying", "score": 0.8, "freq": 1 } ]
  }, {
    "text": "out",
    "offset": 6,
    "length": 3,
    "options": []
  }, {
    "text": "elasticsearch",
    "offset": 10,
    "length": 13,
    "options": []
  } ],
  "my-suggest-2": ...
  "hits": ...
  "took": 2,
  "timed_out": false,
  "suggest": {
    "my-suggest-1": [ {
      "text": "tring",
      "offset": 0,
      "length": 5,
      "options": [ {"text": "trying", "score": 0.8, "freq": 1 } ]
    }, {
      "text": "out",
      "offset": 6,
      "length": 3,
      "options": []
    }, {
      "text": "elasticsearch",
      "offset": 10,
      "length": 13,
      "options": []
    } ],
    "my-suggest-2": ...
  }
}
--------------------------------------------------
// TESTRESPONSE[s/"_shards": \.\.\./"_shards": "$body._shards",/]
// TESTRESPONSE[s/"my-suggest-2": \.\.\./"my-suggest-2": "$body.my-suggest-2"/]
// TESTRESPONSE[s/"hits": .../"hits": "$body.hits",/]
// TESTRESPONSE[s/"took": 2,/"took": "$body.took",/]
// TESTRESPONSE[s/"my-suggest-2": \.\.\./"my-suggest-2": "$body.suggest.my-suggest-2"/]

Each options array contains an option object that includes the

@@ -123,17 +117,19 @@ and applies to the `my-suggest-1` and `my-suggest-2` suggestions.

[source,js]
--------------------------------------------------
POST _suggest
POST _search
{
  "text" : "tring out Elasticsearch",
  "my-suggest-1" : {
    "term" : {
      "field" : "message"
    }
  },
  "my-suggest-2" : {
    "term" : {
      "field" : "user"
  "suggest": {
    "text" : "tring out Elasticsearch",
    "my-suggest-1" : {
      "term" : {
        "field" : "message"
      }
    },
    "my-suggest-2" : {
      "term" : {
        "field" : "user"
      }
    }
  }
}

@@ -152,12 +152,14 @@ documents once deleted are never shown. This request:

[source,js]
--------------------------------------------------
POST music/_suggest?pretty
POST music/_search?pretty
{
  "song-suggest" : {
    "prefix" : "nir",
    "completion" : {
      "field" : "suggest"
  "suggest": {
    "song-suggest" : {
      "prefix" : "nir",
      "completion" : {
        "field" : "suggest"
      }
    }
  }
}

@@ -175,24 +177,30 @@ returns this response:
    "successful" : 5,
    "failed" : 0
  },
  "song-suggest" : [ {
    "text" : "nir",
    "offset" : 0,
    "length" : 3,
    "options" : [ {
      "text" : "Nirvana",
      "_index": "music",
      "_type": "song",
      "_id": "1",
      "_score": 1.0,
      "_source": {
        "suggest": ["Nevermind", "Nirvana"]
      }
  "hits": ...
  "took": 2,
  "timed_out": false,
  "suggest": {
    "song-suggest" : [ {
      "text" : "nir",
      "offset" : 0,
      "length" : 3,
      "options" : [ {
        "text" : "Nirvana",
        "_index": "music",
        "_type": "song",
        "_id": "1",
        "_score": 1.0,
        "_source": {
          "suggest": ["Nevermind", "Nirvana"]
        }
      } ]
    } ]
  } ]
  }
}
--------------------------------------------------
// TESTRESPONSE
// TESTRESPONSE[s/"hits": .../"hits": "$body.hits",/]
// TESTRESPONSE[s/"took": 2,/"took": "$body.took",/]

IMPORTANT: `_source` meta-field must be enabled, which is the default

@@ -289,14 +297,16 @@ you can have a typo in your search and still get results back.

[source,js]
--------------------------------------------------
POST music/_suggest?pretty
POST music/_search?pretty
{
  "song-suggest" : {
    "prefix" : "nor",
    "completion" : {
      "field" : "suggest",
      "fuzzy" : {
        "fuzziness" : 2
  "suggest": {
    "song-suggest" : {
      "prefix" : "nor",
      "completion" : {
        "field" : "suggest",
        "fuzzy" : {
          "fuzziness" : 2
        }
      }
    }
  }
}

@@ -346,12 +356,14 @@ you can express a prefix as a regular expression

[source,js]
--------------------------------------------------
POST music/_suggest?pretty
POST music/_search?pretty
{
  "song-suggest" : {
    "regex" : "n[ever|i]r",
    "completion" : {
      "field" : "suggest"
  "suggest": {
    "song-suggest" : {
      "regex" : "n[ever|i]r",
      "completion" : {
        "field" : "suggest"
      }
    }
  }
}

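For clients, the move from `_suggest` to `_search` is mechanical: nest the existing suggest body under a top-level `suggest` key and change the endpoint. A sketch using the low-level Java REST client, with the endpoint and body values mirroring the docs above (treat the exact `performRequest` signature as an assumption of the 5.x client):

---------------------------------------------------------------------------
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.io.IOException;
import java.util.Collections;

final class SuggestViaSearchExample {
    static Response suggest(RestClient client) throws IOException {
        // the suggest body is wrapped in a "suggest" element, as required by _search
        String body = "{ \"suggest\": { \"my-suggest-1\": {"
            + " \"text\": \"tring out Elasticsearch\","
            + " \"term\": { \"field\": \"message\" } } } }";
        return client.performRequest("POST", "/twitter/_search",
            Collections.emptyMap(), new NStringEntity(body, ContentType.APPLICATION_JSON));
    }
}
---------------------------------------------------------------------------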
@@ -138,15 +138,17 @@ filters suggestions by multiple categories:

[source,js]
--------------------------------------------------
POST place/_suggest?pretty
POST place/_search?pretty
{
  "suggest" : {
    "prefix" : "tim",
    "completion" : {
      "field" : "suggest",
      "size": 10,
      "contexts": {
        "place_type": [ "cafe", "restaurants" ]
  "suggest": {
    "place_suggestion" : {
      "prefix" : "tim",
      "completion" : {
        "field" : "suggest",
        "size": 10,
        "contexts": {
          "place_type": [ "cafe", "restaurants" ]
        }
      }
    }
  }
}

@@ -165,18 +167,20 @@ suggestions associated with some categories:

[source,js]
--------------------------------------------------
POST place/_suggest?pretty
POST place/_search?pretty
{
  "suggest" : {
    "prefix" : "tim",
    "completion" : {
      "field" : "suggest",
      "size": 10,
      "contexts": {
        "place_type": [ <1>
          { "context" : "cafe" },
          { "context" : "restaurants", "boost": 2 }
        ]
  "suggest": {
    "place_suggestion" : {
      "prefix" : "tim",
      "completion" : {
        "field" : "suggest",
        "size": 10,
        "contexts": {
          "place_type": [ <1>
            { "context" : "cafe" },
            { "context" : "restaurants", "boost": 2 }
          ]
        }
      }
    }
  }
}

@@ -275,17 +279,19 @@ the encoded geohash of a geo point:

[source,js]
--------------------------------------------------
POST place/_suggest
POST place/_search
{
  "suggest" : {
    "prefix" : "tim",
    "completion" : {
      "field" : "suggest",
      "size": 10,
      "contexts": {
        "location": {
          "lat": 43.662,
          "lon": -79.380
  "suggest": {
    "place_suggestion" : {
      "prefix" : "tim",
      "completion" : {
        "field" : "suggest",
        "size": 10,
        "contexts": {
          "location": {
            "lat": 43.662,
            "lon": -79.380
          }
        }
      }
    }
  }
}

@@ -303,28 +309,30 @@ than others, as shown by the following:

[source,js]
--------------------------------------------------
POST place/_suggest?pretty
POST place/_search?pretty
{
  "suggest" : {
    "prefix" : "tim",
    "completion" : {
      "field" : "suggest",
      "size": 10,
      "contexts": {
        "location": [ <1>
          {
            "lat": 43.6624803,
            "lon": -79.3863353,
            "precision": 2
          },
          {
            "context": {
  "suggest": {
    "place_suggestion" : {
      "prefix" : "tim",
      "completion" : {
        "field" : "suggest",
        "size": 10,
        "contexts": {
          "location": [ <1>
            {
              "lat": 43.6624803,
              "lon": -79.3863353
              "lon": -79.3863353,
              "precision": 2
            },
            "boost": 2
          }
        ]
            {
              "context": {
                "lat": 43.6624803,
                "lon": -79.3863353
              },
              "boost": 2
            }
          ]
        }
      }
    }
  }
}

@@ -84,21 +84,23 @@ suggester in the same spot you'd use the `term` suggester:

[source,js]
--------------------------------------------------
POST _suggest
POST test/_search
{
  "text": "noble prize",
  "simple_phrase": {
    "phrase": {
      "field": "title.trigram",
      "size": 1,
      "gram_size": 3,
      "direct_generator": [ {
  "suggest": {
    "text": "noble prize",
    "simple_phrase": {
      "phrase": {
        "field": "title.trigram",
        "suggest_mode": "always"
      } ],
      "highlight": {
        "pre_tag": "<em>",
        "post_tag": "</em>"
        "size": 1,
        "gram_size": 3,
        "direct_generator": [ {
          "field": "title.trigram",
          "suggest_mode": "always"
        } ],
        "highlight": {
          "pre_tag": "<em>",
          "post_tag": "</em>"
        }
      }
    }
  }
}

@@ -112,21 +114,28 @@ The response contains suggestions scored by the most likely spell correction fir
--------------------------------------------------
{
  "_shards": ...
  "simple_phrase" : [
    {
      "text" : "noble prize",
      "offset" : 0,
      "length" : 11,
      "options" : [ {
        "text" : "nobel prize",
        "highlighted": "<em>nobel</em> prize",
        "score" : 0.5962314
      }]
    }
  ]
  "hits": ...
  "timed_out": false,
  "took": 3,
  "suggest": {
    "simple_phrase" : [
      {
        "text" : "noble prize",
        "offset" : 0,
        "length" : 11,
        "options" : [ {
          "text" : "nobel prize",
          "highlighted": "<em>nobel</em> prize",
          "score" : 0.5962314
        }]
      }
    ]
  }
}
--------------------------------------------------
// TESTRESPONSE[s/"_shards": .../"_shards": "$body._shards",/]
// TESTRESPONSE[s/"hits": .../"hits": "$body.hits",/]
// TESTRESPONSE[s/"took": 3,/"took": "$body.took",/]

==== Basic Phrase suggest API parameters

@@ -217,28 +226,30 @@ The response contains suggestions scored by the most likely spell correction fir

[source,js]
--------------------------------------------------
POST _suggest
POST _search
{
  "text" : "noble prize",
  "simple_phrase" : {
    "phrase" : {
      "field" : "title.trigram",
      "size" : 1,
      "direct_generator" : [ {
        "field" : "title.trigram",
        "suggest_mode" : "always",
        "min_word_length" : 1
      } ],
      "collate": {
        "query": { <1>
          "inline" : {
            "match": {
              "{{field_name}}" : "{{suggestion}}" <2>
  "suggest": {
    "text" : "noble prize",
    "simple_phrase" : {
      "phrase" : {
        "field" : "title.trigram",
        "size" : 1,
        "direct_generator" : [ {
          "field" : "title.trigram",
          "suggest_mode" : "always",
          "min_word_length" : 1
        } ],
        "collate": {
          "query": { <1>
            "inline" : {
              "match": {
                "{{field_name}}" : "{{suggestion}}" <2>
              }
            }
          },
          "params": {"field_name" : "title"}, <3>
          "prune": true <4>
        },
        "params": {"field_name" : "title"}, <3>
        "prune": true <4>
      }
    }
  }
}

@@ -381,22 +392,24 @@ accept ordinary analyzer names.

[source,js]
--------------------------------------------------
POST _suggest
POST _search
{
  "text" : "obel prize",
  "simple_phrase" : {
    "phrase" : {
      "field" : "title.trigram",
      "size" : 1,
      "direct_generator" : [ {
  "suggest": {
    "text" : "obel prize",
    "simple_phrase" : {
      "phrase" : {
        "field" : "title.trigram",
        "suggest_mode" : "always"
      }, {
        "field" : "title.reverse",
        "suggest_mode" : "always",
        "pre_filter" : "reverse",
        "post_filter" : "reverse"
      } ]
        "size" : 1,
        "direct_generator" : [ {
          "field" : "title.trigram",
          "suggest_mode" : "always"
        }, {
          "field" : "title.reverse",
          "suggest_mode" : "always",
          "pre_filter" : "reverse",
          "post_filter" : "reverse"
        } ]
      }
    }
  }
}

@@ -28,21 +28,21 @@ import org.elasticsearch.ingest.Processor;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.Strings.hasLength;
import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException;
import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalMap;
import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalStringProperty;
import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty;
import static org.elasticsearch.script.ScriptType.FILE;
import static org.elasticsearch.script.ScriptType.INLINE;
import static org.elasticsearch.script.ScriptType.STORED;

/**
 * Processor that adds new fields with their corresponding values. If the field is already present, its value
 * will be replaced with the provided one.
 * Processor that evaluates a script with an ingest document in its context.
 */
public final class ScriptProcessor extends AbstractProcessor {

@@ -51,12 +51,24 @@ public final class ScriptProcessor extends AbstractProcessor {
    private final Script script;
    private final ScriptService scriptService;

    /**
     * Processor that evaluates a script with an ingest document in its context
     *
     * @param tag The processor's tag.
     * @param script The {@link Script} to execute.
     * @param scriptService The {@link ScriptService} used to execute the script.
     */
    ScriptProcessor(String tag, Script script, ScriptService scriptService) {
        super(tag);
        this.script = script;
        this.scriptService = scriptService;
    }

    /**
     * Executes the script with the Ingest document in context.
     *
     * @param document The Ingest document passed into the script context under the "ctx" object.
     */
    @Override
    public void execute(IngestDocument document) {
        ExecutableScript executableScript = scriptService.executable(script, ScriptContext.Standard.INGEST);

@@ -111,16 +123,27 @@ public final class ScriptProcessor extends AbstractProcessor {
        }

        final Script script;
        String scriptPropertyUsed;
        if (Strings.hasLength(file)) {
            script = new Script(FILE, lang, file, (Map<String, Object>)params);
            scriptPropertyUsed = "file";
        } else if (Strings.hasLength(inline)) {
            script = new Script(INLINE, lang, inline, (Map<String, Object>)params);
            scriptPropertyUsed = "inline";
        } else if (Strings.hasLength(id)) {
            script = new Script(STORED, lang, id, (Map<String, Object>)params);
            scriptPropertyUsed = "id";
        } else {
            throw newConfigurationException(TYPE, processorTag, null, "Could not initialize script");
        }

        // verify script is able to be compiled before successfully creating processor.
        try {
            scriptService.compile(script, ScriptContext.Standard.INGEST, script.getOptions());
        } catch (ScriptException e) {
            throw newConfigurationException(TYPE, processorTag, scriptPropertyUsed, e);
        }

        return new ScriptProcessor(processorTag, script, scriptService);
    }
}
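The factory path above boils down to building a `Script` and compiling it once at pipeline-creation time, so bad scripts fail fast instead of at ingest time. Roughly, as a sketch (the `"painless"` language and script source here are example values, not part of the change):

---------------------------------------------------------------------------
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;

import java.util.Collections;

final class ScriptProcessorSketch {
    // Illustration of the fail-fast compile check performed by the factory.
    static void compileCheck(ScriptService scriptService) {
        Script script = new Script(ScriptType.INLINE, "painless",
            "ctx.field = 'value'", Collections.emptyMap());
        // throws ScriptException at pipeline creation instead of at ingest time
        scriptService.compile(script, ScriptContext.Standard.INGEST, script.getOptions());
    }
}
---------------------------------------------------------------------------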
@@ -21,6 +21,7 @@ package org.elasticsearch.ingest.common;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

@@ -31,7 +32,9 @@ import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class ScriptProcessorFactoryTests extends ESTestCase {

@@ -98,4 +101,22 @@ public class ScriptProcessorFactoryTests extends ESTestCase {

        assertThat(exception.getMessage(), is("Need [file], [id], or [inline] parameter to refer to scripts"));
    }

    public void testFactoryInvalidateWithInvalidCompiledScript() throws Exception {
        String randomType = randomFrom("inline", "file", "id");
        ScriptService mockedScriptService = mock(ScriptService.class);
        ScriptException thrownException = new ScriptException("compile-time exception", new RuntimeException(),
            Collections.emptyList(), "script", "mockscript");
        when(mockedScriptService.compile(any(), any(), any())).thenThrow(thrownException);
        factory = new ScriptProcessor.Factory(mockedScriptService);

        Map<String, Object> configMap = new HashMap<>();
        configMap.put("lang", "mockscript");
        configMap.put(randomType, "my_script");

        ElasticsearchException exception = expectThrows(ElasticsearchException.class,
            () -> factory.create(null, randomAsciiOfLength(10), configMap));

        assertThat(exception.getMessage(), is("compile-time exception"));
    }
}
@@ -24,12 +24,10 @@ import java.util.Map;

import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.mockito.stubbing.Answer;

import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.core.Is.is;

@@ -117,16 +117,13 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq

    @Override
    protected ReindexRequest buildRequest(RestRequest request) throws IOException {
        if (false == request.hasContent()) {
            throw new IllegalArgumentException("_reindex requires a request body");
        }
        if (request.hasParam("pipeline")) {
            throw new IllegalArgumentException("_reindex doesn't support [pipeline] as a query parameter. "
                + "Specify it in the [dest] object instead.");
        }
        ReindexRequest internal = new ReindexRequest(new SearchRequest(), new IndexRequest());
        try (XContentParser xcontent = XContentFactory.xContent(request.content()).createParser(request.content())) {
            PARSER.parse(xcontent, internal, new ReindexParseContext(searchRequestParsers, parseFieldMatcher));
        try (XContentParser parser = request.contentParser()) {
            PARSER.parse(parser, internal, new ReindexParseContext(searchRequestParsers, parseFieldMatcher));
        }
        return internal;
    }

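`RestRequest#contentParser()` hides the `XContentFactory` lookup and throws `ElasticsearchParseException("Body required")` when the request has no body (the reindex handler keeps its own `hasContent()` check for a clearer error message). The pattern for other handlers, sketched with a hypothetical wrapper class:

---------------------------------------------------------------------------
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestRequest;

import java.io.IOException;
import java.util.Map;

final class ContentParserExample {
    static Map<String, Object> parseBody(RestRequest request) throws IOException {
        // throws ElasticsearchParseException("Body required") when there is no content
        try (XContentParser parser = request.contentParser()) {
            return parser.map();
        }
    }
}
---------------------------------------------------------------------------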
@@ -385,7 +385,6 @@ public class Netty4Transport extends TcpTransport<Channel> {
            }
            throw e;
        }
        onAfterChannelsConnected(nodeChannels);
        success = true;
    } finally {
        if (success == false) {

@@ -399,14 +398,6 @@ public class Netty4Transport extends TcpTransport<Channel> {
        return nodeChannels;
    }

    /**
     * Allows for logic to be executed after a connection has been made on all channels. While this method is being executed, the node is
     * not listed as being connected to.
     * @param nodeChannels the {@link NodeChannels} that have been connected
     */
    protected void onAfterChannelsConnected(NodeChannels nodeChannels) {
    }

    private class ChannelCloseListener implements ChannelFutureListener {

        private final DiscoveryNode node;

@@ -417,6 +408,7 @@ public class Netty4Transport extends TcpTransport<Channel> {

    @Override
    public void operationComplete(final ChannelFuture future) throws Exception {
        onChannelClosed(future.channel());
        NodeChannels nodeChannels = connectedNodes.get(node);
        if (nodeChannels != null && nodeChannels.hasChannel(future.channel())) {
            threadPool.generic().execute(() -> disconnectFromNode(node, future.channel(), "channel closed event"));

@@ -23,7 +23,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;

@@ -83,7 +82,7 @@ public class TestDeprecationHeaderRestAction extends BaseRestHandler {
    public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
        final List<String> settings;

        try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) {
        try (XContentParser parser = request.contentParser()) {
            final Map<String, Object> source = parser.map();

            if (source.containsKey("deprecated_settings")) {

@@ -115,3 +115,26 @@
  - match: { _source.bytes_in: 1234 }
  - match: { _source.bytes_out: 4321 }
  - match: { _source.bytes_total: 5555 }

---
"Test script processor with syntax error in inline script":
  - do:
      catch: request
      ingest.put_pipeline:
        id: "my_pipeline"
        body: >
          {
            "description": "_description",
            "processors": [
              {
                "script" : {
                  "inline": "invalid painless, hear me roar!"
                }
              }
            ]
          }
  - match: { error.header.processor_type: "script" }
  - match: { error.header.property_name: "inline" }
  - match: { error.type: "script_exception" }
  - match: { error.reason: "compile error" }

@@ -1,7 +1,7 @@
{
  "suggest": {
    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-suggesters.html",
    "methods": ["POST", "GET"],
    "methods": ["POST"],
    "url": {
      "path": "/_suggest",
      "paths": ["/_suggest", "/{index}/_suggest"],

@@ -13,18 +13,18 @@
      },
      "params": {
        "ignore_unavailable": {
            "type" : "boolean",
            "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
          "type" : "boolean",
          "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
        },
        "allow_no_indices": {
            "type" : "boolean",
            "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
          "type" : "boolean",
          "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
        },
        "expand_wildcards": {
            "type" : "enum",
            "options" : ["open","closed","none","all"],
            "default" : "open",
            "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
          "type" : "enum",
          "options" : ["open","closed","none","all"],
          "default" : "open",
          "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
        },
        "preference": {
          "type" : "string",

@@ -41,4 +41,4 @@
      "required" : true
    }
  }
}
@@ -38,22 +38,24 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        index: test1
        body:
          result:
            text: "b"
            completion:
              field: bar.completion
          suggest:
            result:
              text: "b"
              completion:
                field: bar.completion

  - do:
      suggest:
      search:
        index: test1
        body:
          result:
            text: "b"
            completion:
              field: baz.completion
          suggest:
            result:
              text: "b"
              completion:
                field: baz.completion

  - do:
      indices.refresh: {}

@@ -291,4 +293,3 @@ setup:
  - gt: { _all.total.completion.fields.bar\.completion.size_in_bytes: 0 }
  - is_false: _all.total.completion.fields.baz\.completion
  - is_false: _all.total.fielddata.fields

@@ -13,6 +13,24 @@ setup:
"Basic tests for suggest API":

  - do:
      search:
        body:
          suggest:
            test_suggestion:
              text: "The Amsterdma meetpu"
              term:
                field: body

  - match: {suggest.test_suggestion.1.options.0.text: amsterdam}
  - match: {suggest.test_suggestion.2.options.0.text: meetup}

---
"Suggest API should have deprecation warning":
  - skip:
      features: 'warnings'
  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          test_suggestion:

@ -0,0 +1,314 @@
|
|||
# This test creates one huge mapping in the setup
|
||||
# Every test should use its own field to make sure it works
|
||||
|
||||
setup:
|
||||
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
body:
|
||||
mappings:
|
||||
test:
|
||||
"properties":
|
||||
"suggest_1":
|
||||
"type" : "completion"
|
||||
"suggest_2":
|
||||
"type" : "completion"
|
||||
"suggest_3":
|
||||
"type" : "completion"
|
||||
"suggest_4":
|
||||
"type" : "completion"
|
||||
"suggest_5a":
|
||||
"type" : "completion"
|
||||
"suggest_5b":
|
||||
"type" : "completion"
|
||||
"suggest_6":
|
||||
"type" : "completion"
|
||||
title:
|
||||
type: keyword
|
||||
|
||||
---
|
||||
"Simple suggestion should work":
|
||||
- skip:
|
||||
features: 'warnings'
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
body:
|
||||
suggest_1: "bar"
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 2
|
||||
body:
|
||||
          suggest_1: "baz"

  - do:
      indices.refresh: {}

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "b"
            completion:
              field: suggest_1

  - length: { result: 1 }
  - length: { result.0.options: 2 }

---
"Simple suggestion array should work":

  - skip:
      features: 'warnings'

  - do:
      index:
        index: test
        type: test
        id: 1
        body:
          suggest_2: ["bar", "foo"]

  - do:
      indices.refresh: {}

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "f"
            completion:
              field: suggest_2

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "foo" }

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "b"
            completion:
              field: suggest_2

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "bar" }

---
"Suggestion entry should work":

  - skip:
      features: 'warnings'

  - do:
      index:
        index: test
        type: test
        id: 1
        body:
          suggest_3:
            input: "bar"
            weight: 2

  - do:
      index:
        index: test
        type: test
        id: 2
        body:
          suggest_3:
            input: "baz"
            weight: 3

  - do:
      indices.refresh: {}

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "b"
            completion:
              field: suggest_3

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "baz" }
  - match: { result.0.options.1.text: "bar" }

---
"Suggestion entry array should work":

  - skip:
      features: 'warnings'

  - do:
      index:
        index: test
        type: test
        id: 1
        body:
          suggest_4:
            - input: "bar"
              weight: 3
            - input: "fo"
              weight: 3

  - do:
      index:
        index: test
        type: test
        id: 2
        body:
          suggest_4:
            - input: "baz"
              weight: 2
            - input: "foo"
              weight: 1

  - do:
      indices.refresh: {}

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "b"
            completion:
              field: suggest_4

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "bar" }
  - match: { result.0.options.1.text: "baz" }

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "f"
            completion:
              field: suggest_4

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "fo" }
  - match: { result.0.options.1.text: "foo" }

---
"Multiple Completion fields should work":

  - skip:
      features: 'warnings'

  - do:
      index:
        index: test
        type: test
        id: 1
        body:
          suggest_5a: "bar"
          suggest_5b: "baz"

  - do:
      indices.refresh: {}

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "b"
            completion:
              field: suggest_5a

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "bar" }

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "b"
            completion:
              field: suggest_5b

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "baz" }

---
"Suggestions with source should work":

  - skip:
      features: 'warnings'

  - do:
      index:
        index: test
        type: test
        id: 1
        body:
          suggest_6:
            input: "bar"
            weight: 2
          title: "title_bar"
          count: 4

  - do:
      index:
        index: test
        type: test
        id: 2
        body:
          suggest_6:
            input: "baz"
            weight: 3
          title: "title_baz"
          count: 3

  - do:
      indices.refresh: {}

  - do:
      warnings:
        - "[POST /_suggest] is deprecated! Use [POST /_search] instead."
      suggest:
        body:
          result:
            text: "b"
            completion:
              field: suggest_6

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "baz" }
  - match: { result.0.options.0._index: "test" }
  - match: { result.0.options.0._type: "test" }
  - match: { result.0.options.0._source.title: "title_baz" }
  - match: { result.0.options.0._source.count: 3 }
  - match: { result.0.options.1.text: "bar" }
  - match: { result.0.options.1._index: "test" }
  - match: { result.0.options.1._type: "test" }
  - match: { result.0.options.1._source.title: "title_bar" }
  - match: { result.0.options.1._source.count: 4 }

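These tests pin the deprecation warning for the standalone endpoint. As a side note for readers outside the YAML harness, here is a minimal Java-client sketch of the _search-based request the warning points to; the client instance, index, suggestion name, and field mirror the YAML above and are assumptions for illustration, not part of this change:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;

// Sketch only: the same completion suggestion as the YAML above, issued
// through the search API instead of the deprecated /_suggest endpoint.
final class SuggestViaSearch {
    static SearchResponse suggest(Client client) {
        return client.prepareSearch("test")
                .setSize(0) // suggestions only, no search hits needed
                .suggest(new SuggestBuilder().addSuggestion("result",
                        SuggestBuilders.completionSuggestion("suggest_1").prefix("b")))
                .get();
    }
}
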
@@ -50,15 +50,16 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "b"
            completion:
              field: suggest_1
          suggest:
            result:
              text: "b"
              completion:
                field: suggest_1

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 2 }

---
"Simple suggestion array should work":

@@ -75,28 +76,30 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "f"
            completion:
              field: suggest_2
          suggest:
            result:
              text: "f"
              completion:
                field: suggest_2

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "foo" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "foo" }

  - do:
      suggest:
      search:
        body:
          result:
            text: "b"
            completion:
              field: suggest_2
          suggest:
            result:
              text: "b"
              completion:
                field: suggest_2

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "bar" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "bar" }

---
"Suggestion entry should work":

@@ -125,17 +128,18 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "b"
            completion:
              field: suggest_3
          suggest:
            result:
              text: "b"
              completion:
                field: suggest_3

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "baz" }
  - match: { result.0.options.1.text: "bar" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 2 }
  - match: { suggest.result.0.options.0.text: "baz" }
  - match: { suggest.result.0.options.1.text: "bar" }

---
"Suggestion entry array should work":

@@ -168,30 +172,32 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "b"
            completion:
              field: suggest_4
          suggest:
            result:
              text: "b"
              completion:
                field: suggest_4

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "bar" }
  - match: { result.0.options.1.text: "baz" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 2 }
  - match: { suggest.result.0.options.0.text: "bar" }
  - match: { suggest.result.0.options.1.text: "baz" }

  - do:
      suggest:
      search:
        body:
          result:
            text: "f"
            completion:
              field: suggest_4
          suggest:
            result:
              text: "f"
              completion:
                field: suggest_4

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "fo" }
  - match: { result.0.options.1.text: "foo" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 2 }
  - match: { suggest.result.0.options.0.text: "fo" }
  - match: { suggest.result.0.options.1.text: "foo" }

---
"Multiple Completion fields should work":

@@ -209,28 +215,30 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "b"
            completion:
              field: suggest_5a
          suggest:
            result:
              text: "b"
              completion:
                field: suggest_5a

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "bar" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "bar" }

  - do:
      suggest:
      search:
        body:
          result:
            text: "b"
            completion:
              field: suggest_5b
          suggest:
            result:
              text: "b"
              completion:
                field: suggest_5b

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "baz" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "baz" }

---
"Suggestions with source should work":

@@ -263,23 +271,23 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "b"
            completion:
              field: suggest_6

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - match: { result.0.options.0.text: "baz" }
  - match: { result.0.options.0._index: "test" }
  - match: { result.0.options.0._type: "test" }
  - match: { result.0.options.0._source.title: "title_baz" }
  - match: { result.0.options.0._source.count: 3 }
  - match: { result.0.options.1.text: "bar" }
  - match: { result.0.options.1._index: "test" }
  - match: { result.0.options.1._type: "test" }
  - match: { result.0.options.1._source.title: "title_bar" }
  - match: { result.0.options.1._source.count: 4 }
          suggest:
            result:
              text: "b"
              completion:
                field: suggest_6

  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 2 }
  - match: { suggest.result.0.options.0.text: "baz" }
  - match: { suggest.result.0.options.0._index: "test" }
  - match: { suggest.result.0.options.0._type: "test" }
  - match: { suggest.result.0.options.0._source.title: "title_baz" }
  - match: { suggest.result.0.options.0._source.count: 3 }
  - match: { suggest.result.0.options.1.text: "bar" }
  - match: { suggest.result.0.options.1._index: "test" }
  - match: { suggest.result.0.options.1._type: "test" }
  - match: { suggest.result.0.options.1._source.title: "title_bar" }
  - match: { suggest.result.0.options.1._source.count: 4 }

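The rewritten assertions above show the shape change: under _search, suggestions come back namespaced in the response's suggest section, which is why every path gained the "suggest." prefix. A hedged Java sketch of reading them back, assuming a response obtained from a suggest-carrying search like the earlier sketch:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;

// Sketch only: pulls the completion options out of the response's suggest
// section; "result" is the suggestion name used by the tests above.
final class ReadSuggestions {
    static void print(SearchResponse response) {
        CompletionSuggestion suggestion = response.getSuggest().getSuggestion("result");
        for (CompletionSuggestion.Entry.Option option : suggestion.getEntries().get(0).getOptions()) {
            System.out.println(option.getText().string() + " score=" + option.getScore());
        }
    }
}
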
@@ -74,18 +74,19 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "foo"
            completion:
              field: suggest_context
              contexts:
                color: "red"
          suggest:
            result:
              text: "foo"
              completion:
                field: suggest_context
                contexts:
                  color: "red"

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "foo red" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "foo red" }

---
"Category suggest context from path should work":

@@ -114,45 +115,48 @@ setup:
      indices.refresh: {}

  - do:
      suggest:
      search:
        body:
          result:
            text: "foo"
            completion:
              field: suggest_context_with_path
              contexts:
                color: "red"
          suggest:
            result:
              text: "foo"
              completion:
                field: suggest_context_with_path
                contexts:
                  color: "red"

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "Foo red" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "Foo red" }

  - do:
      suggest:
      search:
        body:
          result:
            text: "foo"
            completion:
              field: suggest_context_with_path
              contexts:
                color: "blue"
          suggest:
            result:
              text: "foo"
              completion:
                field: suggest_context_with_path
                contexts:
                  color: "blue"

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "Foo blue" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "Foo blue" }

  - do:
      suggest:
      search:
        body:
          result:
            text: "foo"
            completion:
              field: suggest_context_with_path
              contexts:
                color: ["blue", "red"]
          suggest:
            result:
              text: "foo"
              completion:
                field: suggest_context_with_path
                contexts:
                  color: ["blue", "red"]

  - length: { result: 1 }
  - length: { result.0.options: 2 }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 2 }

---
"Geo suggest should work":

@@ -190,21 +194,22 @@ setup:
      indices.get_mapping: {}

  - do:
      suggest:
      search:
        index: test
        body:
          result:
            text: "mar"
            completion:
              field: suggest_geo
              contexts:
                location:
                  lat : 52.2263
                  lon : 4.543
          suggest:
            result:
              text: "mar"
              completion:
                field: suggest_geo
                contexts:
                  location:
                    lat : 52.2263
                    lon : 4.543

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "Marriot in Amsterdam" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "Marriot in Amsterdam" }

---
"Multi contexts should work":

@@ -240,33 +245,35 @@ setup:
      indices.get_mapping: {}

  - do:
      suggest:
      search:
        index: test
        body:
          result:
            text: "mar"
            completion:
              field: suggest_multi_contexts
              contexts:
                location:
                  lat : 52.22
                  lon : 4.53
          suggest:
            result:
              text: "mar"
              completion:
                field: suggest_multi_contexts
                contexts:
                  location:
                    lat : 52.22
                    lon : 4.53

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "Marriot in Amsterdam" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "Marriot in Amsterdam" }

  - do:
      suggest:
      search:
        index: test
        body:
          result:
            text: "mar"
            completion:
              field: suggest_multi_contexts
              contexts:
                color: "blue"
          suggest:
            result:
              text: "mar"
              completion:
                field: suggest_multi_contexts
                contexts:
                  color: "blue"

  - length: { result: 1 }
  - length: { result.0.options: 1 }
  - match: { result.0.options.0.text: "Marriot in Berlin" }
  - length: { suggest.result: 1 }
  - length: { suggest.result.0.options: 1 }
  - match: { suggest.result.0.options.0.text: "Marriot in Berlin" }

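For the context-enabled fields exercised above, a sketch of building the same category and geo filters with the 5.x-era Java completion-suggester builders; treat the exact signatures as assumptions, and note this mirrors the YAML rather than reproducing anything in this diff:

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.search.suggest.SuggestBuilders;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class ContextSuggestSketch {
    // Category context: completions on "suggest_context" restricted to color "red".
    static CompletionSuggestionBuilder byColor() {
        Map<String, List<? extends ToXContent>> contexts = new HashMap<>();
        contexts.put("color", Collections.singletonList(
                CategoryQueryContext.builder().setCategory("red").build()));
        return SuggestBuilders.completionSuggestion("suggest_context").prefix("foo").contexts(contexts);
    }

    // Geo context: completions on "suggest_geo" near the test's Amsterdam point.
    static CompletionSuggestionBuilder byLocation() {
        Map<String, List<? extends ToXContent>> contexts = new HashMap<>();
        contexts.put("location", Collections.singletonList(
                GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.2263, 4.543)).build()));
        return SuggestBuilders.completionSuggestion("suggest_geo").prefix("mar").contexts(contexts);
    }
}
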
@@ -1,4 +1,5 @@
rootProject.name = 'elasticsearch'
String dirName = rootProject.projectDir.name
rootProject.name = dirName

List projects = [
  'build-tools',

@@ -88,7 +89,7 @@ if (isEclipse) {
/**
 * Iterates over sub directories, looking for build.gradle, and adds a project if found
 * for that dir with the given path prefix. Note that this requires each level
 * of the dir hiearchy to have a build.gradle. Otherwise we would have to iterate
 * of the dir hierarchy to have a build.gradle. Otherwise we would have to iterate
 * all files/directories in the source tree to find all projects.
 */
void addSubProjects(String path, File dir) {

@@ -98,17 +99,18 @@ void addSubProjects(String path, File dir) {

  String projectName = "${path}:${dir.name}"
  include projectName
  if (path.isEmpty()) {
    project(projectName).projectDir = dir
  }
  for (File subdir : dir.listFiles()) {
    addSubProjects(projectName, subdir)
  }
}

// look for extra plugins for elasticsearch
File xplugins = new File(rootProject.projectDir.parentFile, 'x-plugins')
if (xplugins.exists()) {
  include ':x-plugins'
  project(':x-plugins').projectDir = xplugins
  for (File extraPluginDir : xplugins.listFiles()) {
    addSubProjects(':x-plugins', extraPluginDir)
File extraProjects = new File(rootProject.projectDir.parentFile, "${dirName}-extra")
if (extraProjects.exists()) {
  for (File extraProjectDir : extraProjects.listFiles()) {
    addSubProjects('', extraProjectDir)
  }
}

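The settings.gradle change above replaces the hard-wired x-plugins sibling with a generic "${dirName}-extra" sibling and reuses addSubProjects for discovery. As a standalone illustration of the documented contract (a directory only becomes a project if it, and every level above it, carries a build.gradle), here is a small Java sketch; the class and default path are hypothetical, not part of the build:

import java.io.File;

// Illustrative only: mirrors the addSubProjects contract described in the
// Groovy above, printing the project names that would be included.
public final class ExtraProjectScan {
    static void scan(String prefix, File dir) {
        if (!dir.isDirectory() || !new File(dir, "build.gradle").exists()) {
            return; // no build.gradle at this level: stop descending
        }
        String projectName = prefix + ":" + dir.getName();
        System.out.println("would include " + projectName);
        File[] children = dir.listFiles();
        if (children != null) {
            for (File child : children) {
                scan(projectName, child);
            }
        }
    }

    public static void main(String[] args) {
        // e.g. a sibling checkout such as ../elasticsearch-extra
        scan("", new File(args.length > 0 ? args[0] : "../elasticsearch-extra"));
    }
}
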
@@ -1065,7 +1065,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
    }

    protected void ensureClusterSizeConsistency() {
        if (cluster() != null) { // if static init fails the cluster can be null
        if (cluster() != null && cluster().size() > 0) { // if static init fails the cluster can be null
            logger.trace("Check consistency for [{}] nodes", cluster().size());
            assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(cluster().size())).get());
        }

@@ -1075,7 +1075,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
     * Verifies that all nodes that have the same version of the cluster state as master have same cluster state
     */
    protected void ensureClusterStateConsistency() throws IOException {
        if (cluster() != null) {
        if (cluster() != null && cluster().size() > 0) {
            ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState();
            byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState);
            // remove local node reference

@@ -130,6 +130,7 @@ import java.util.stream.Stream;

import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY;
import static org.apache.lucene.util.LuceneTestCase.rarely;
import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING;
import static org.elasticsearch.test.ESTestCase.assertBusy;
import static org.elasticsearch.test.ESTestCase.randomFrom;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

@@ -589,12 +590,14 @@ public final class InternalTestCluster extends TestCluster {
            .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), seed);

        if (autoManageMinMasterNodes) {
            assert finalSettings.get(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null :
            assert finalSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null :
                "min master nodes may not be set when auto managed";
            finalSettings
                // don't wait too long not to slow down tests
                .put(ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.getKey(), "5s")
                .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), defaultMinMasterNodes);
                .put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), defaultMinMasterNodes);
        } else if (finalSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) == null) {
            throw new IllegalArgumentException(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() + " must be configured");
        }
        MockNode node = new MockNode(finalSettings.build(), plugins);
        return new NodeAndClient(name, node, nodeId);

@@ -883,8 +886,8 @@ public final class InternalTestCluster extends TestCluster {
                newSettings.put(callbackSettings);
            }
            if (minMasterNodes >= 0) {
                assert ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(newSettings.build()) == false : "min master nodes is auto managed";
                newSettings.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes).build();
                assert DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(newSettings.build()) == false : "min master nodes is auto managed";
                newSettings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes).build();
            }
            if (clearDataIfNeeded) {
                clearDataIfNeeded(callback);

@@ -908,6 +911,10 @@ public final class InternalTestCluster extends TestCluster {
        private void createNewNode(final Settings newSettings) {
            final long newIdSeed = NodeEnvironment.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id
            Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build();
            if (DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(finalSettings) == false) {
                throw new IllegalStateException(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey() +
                    " is not configured after restart of [" + name + "]");
            }
            Collection<Class<? extends Plugin>> plugins = node.getClasspathPlugins();
            node = new MockNode(finalSettings, plugins);
            markNodeDataDirsAsNotEligableForWipe(node);

@@ -1694,7 +1701,7 @@ public final class InternalTestCluster extends TestCluster {
        logger.debug("updating min_master_nodes to [{}]", minMasterNodes);
        try {
            assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(
                Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes)
                Settings.builder().put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minMasterNodes)
            ));
        } catch (Exception e) {
            throw new ElasticsearchException("failed to update minimum master node to [{}] (current masters [{}])", e,

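A note on the value being applied throughout these hunks: the auto-managed min_master_nodes follows the quorum rule, master-eligible nodes / 2 + 1, the same formula the test assertions compute below. A tiny runnable sketch of the arithmetic (class name hypothetical):

public final class QuorumMath {
    // The quorum rule behind min_master_nodes: a strict majority of
    // master-eligible nodes, via integer division.
    static int minMasterNodes(int masterEligibleNodes) {
        return masterEligibleNodes / 2 + 1;
    }

    public static void main(String[] args) {
        // 1 -> 1, 2 -> 2, 3 -> 2, 4 -> 3, 5 -> 3: always more than half.
        for (int n = 1; n <= 5; n++) {
            System.out.println(n + " master-eligible nodes -> min_master_nodes = " + minMasterNodes(n));
        }
    }
}
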
@@ -46,9 +46,11 @@ import org.junit.After;
import org.junit.Before;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

@@ -1847,4 +1849,39 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
            assertEquals("[][" + dummy.getAddress() +"] handshake_timeout[1ms]", ex.getMessage());
        }
    }

    public void testTcpHandshakeConnectionReset() throws IOException, InterruptedException {
        try (ServerSocket socket = new ServerSocket()) {
            socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1);
            socket.setReuseAddress(true);
            DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(),
                socket.getLocalPort()), emptyMap(),
                emptySet(), version0);
            Thread t = new Thread() {
                @Override
                public void run() {
                    try {
                        Socket accept = socket.accept();
                        accept.close();
                    } catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                }
            };
            t.start();
            ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
            builder.addConnections(1,
                TransportRequestOptions.Type.BULK,
                TransportRequestOptions.Type.PING,
                TransportRequestOptions.Type.RECOVERY,
                TransportRequestOptions.Type.REG,
                TransportRequestOptions.Type.STATE);
            builder.setHandshakeTimeout(TimeValue.timeValueHours(1));
            ConnectTransportException ex = expectThrows(ConnectTransportException.class,
                () -> serviceA.connectToNode(dummy, builder.build()));
            assertEquals("[][" + dummy.getAddress() +"] general node connection failure", ex.getMessage());
            assertEquals("handshake failed", ex.getCause().getMessage());
            t.join();
        }
    }
}

@@ -336,6 +336,7 @@ public class MockTcpTransport extends TcpTransport<MockTcpTransport.MockChannel>
            if (isOpen.compareAndSet(true, false)) {
                //establish a happens-before edge between closing and accepting a new connection
                synchronized (this) {
                    onChannelClosed(this);
                    IOUtils.close(serverSocket, activeChannel, () -> IOUtils.close(workerChannels.keySet()),
                        () -> cancellableThreads.cancel("channel closed"), onClose);
                }

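The one-line MockTcpTransport change wraps the close sequence in a synchronized block so that closing and accepting a connection are ordered by the Java memory model. A minimal, self-contained sketch of the same pattern; this is illustrative code with made-up names, not the transport's actual implementation:

import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

// Illustrative only: a close() and an accept-like callback coordinate through
// one monitor, so the accept side always observes a fully completed close.
final class ChannelLike implements Closeable {
    private final AtomicBoolean isOpen = new AtomicBoolean(true);

    void onAccept() {
        synchronized (this) {
            if (isOpen.get() == false) {
                return; // close() already ran; its writes are visible here
            }
            // ... register the new connection ...
        }
    }

    @Override
    public void close() throws IOException {
        if (isOpen.compareAndSet(true, false)) {
            // entering the same monitor orders this close against any concurrent accept
            synchronized (this) {
                // ... release resources ...
            }
        }
    }
}
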
@@ -36,7 +36,6 @@ import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.discovery.TestZenDiscovery;
import org.elasticsearch.transport.MockTcpTransportPlugin;
import org.elasticsearch.transport.TransportSettings;
import org.hamcrest.Matcher;

import java.io.IOException;
import java.nio.file.Files;

@@ -61,7 +60,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFile
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;

/**

@@ -137,22 +135,15 @@ public class InternalTestClusterTests extends ESTestCase {

    private void assertMMNinNodeSetting(String node, InternalTestCluster cluster, int masterNodes) {
        final int minMasterNodes = masterNodes / 2 + 1;
        final Matcher<Map<? extends String, ? extends String>> minMasterMatcher =
            hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes));
        final Matcher<Map<? extends String, ?>> noMinMasterNodesMatcher = not(hasKey(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()));
        Settings nodeSettings = cluster.client(node).admin().cluster().prepareNodesInfo(node).get().getNodes().get(0).getSettings();
        assertThat("node setting of node [" + node + "] has the wrong min_master_node setting: ["
                + nodeSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]",
            nodeSettings.getAsMap(),
            cluster.getAutoManageMinMasterNode() ? minMasterMatcher: noMinMasterNodesMatcher);
            hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes)));
    }

    private void assertMMNinClusterSetting(InternalTestCluster cluster, int masterNodes) {
        final int minMasterNodes = masterNodes / 2 + 1;
        Matcher<Map<? extends String, ? extends String>> minMasterMatcher =
            hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes));
        Matcher<Map<? extends String, ?>> noMinMasterNodesMatcher = not(hasKey(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()));

        for (final String node : cluster.getNodeNames()) {
            Settings stateSettings = cluster.client(node).admin().cluster().prepareState().setLocal(true)
                .get().getState().getMetaData().settings();

@@ -160,27 +151,44 @@ public class InternalTestClusterTests extends ESTestCase {
            assertThat("dynamic setting for node [" + node + "] has the wrong min_master_node setting : ["
                    + stateSettings.get(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()) + "]",
                stateSettings.getAsMap(),
                cluster.getAutoManageMinMasterNode() ? minMasterMatcher: noMinMasterNodesMatcher);
                hasEntry(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), Integer.toString(minMasterNodes)));
        }
    }

    public void testBeforeTest() throws Exception {
        final boolean autoManageMinMasterNodes = randomBoolean();
        long clusterSeed = randomLong();
        boolean masterNodes = randomBoolean();
        int minNumDataNodes = randomIntBetween(0, 3);
        int maxNumDataNodes = randomIntBetween(minNumDataNodes, 4);
        int numClientNodes = randomIntBetween(0, 2);
        final boolean masterNodes;
        final int minNumDataNodes;
        final int maxNumDataNodes;
        if (autoManageMinMasterNodes) {
            masterNodes = randomBoolean();
            minNumDataNodes = randomIntBetween(0, 3);
            maxNumDataNodes = randomIntBetween(minNumDataNodes, 4);
        } else {
            // if we manage min master nodes, we need to lock down the number of nodes
            minNumDataNodes = randomIntBetween(0, 4);
            maxNumDataNodes = minNumDataNodes;
            masterNodes = false;
        }
        final int numClientNodes = randomIntBetween(0, 2);
        final String clusterName1 = "shared1";
        final String clusterName2 = "shared2";
        NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() {
            @Override
            public Settings nodeSettings(int nodeOrdinal) {
                return Settings.builder()
                final Settings.Builder settings = Settings.builder()
                    .put(
                        NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(),
                        2 * ((masterNodes ? InternalTestCluster.DEFAULT_HIGH_NUM_MASTER_NODES : 0) + maxNumDataNodes + numClientNodes))
                    .put(NetworkModule.HTTP_ENABLED.getKey(), false)
                    .put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME).build();
                    .put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME);
                if (autoManageMinMasterNodes == false) {
                    assert minNumDataNodes == maxNumDataNodes;
                    assert masterNodes == false;
                    settings.put(DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minNumDataNodes / 2 + 1);
                }
                return settings.build();
            }

            @Override

@@ -195,7 +203,6 @@ public class InternalTestClusterTests extends ESTestCase {

        Path baseDir = createTempDir();
        final List<Class<? extends Plugin>> mockPlugins = Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class);
        final boolean autoManageMinMasterNodes = randomBoolean();
        InternalTestCluster cluster0 = new InternalTestCluster(clusterSeed, baseDir, masterNodes,
            autoManageMinMasterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes,
            enableHttpPipelining, nodePrefix, mockPlugins, Function.identity());

@@ -258,9 +265,8 @@ public class InternalTestClusterTests extends ESTestCase {
        boolean enableHttpPipelining = randomBoolean();
        String nodePrefix = "test";
        Path baseDir = createTempDir();
        final boolean autoManageMinMasterNodes = randomBoolean();
        InternalTestCluster cluster = new InternalTestCluster(clusterSeed, baseDir, masterNodes,
            autoManageMinMasterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes,
            true, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes,
            enableHttpPipelining, nodePrefix, Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class),
            Function.identity());
        try {