Parameterized exception messages
Added dynamic arguments to `ElasticsearchException`, `ElasticsearchParseException` and `ElasticsearchTimeoutException`. This keeps the exception messages clean and readable and promotes consistency around wrapping dynamic arguments with `[` and `]`. This is just the start; the change still needs to be propagated to all exceptions deriving from `ElasticsearchException`. Work has also started on standardizing on lower-case logging and exception messages, where we need to be consistent as well. The messages are rendered with the same `LoggerMessageFormat` that our logging infrastructure uses (see the sketch below).
parent 1e35674eb0
commit 6021bd8cca
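To make the new pattern concrete, here is a minimal sketch of a call site before and after the change. The class and method names in the sketch are made up for illustration; only the `ElasticsearchParseException` constructors and `LoggerMessageFormat.format` come from this commit.

```java
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.logging.support.LoggerMessageFormat;

// Hypothetical example class; the names below are not part of the commit.
class ParameterizedMessageExample {

    static void rejectUnknownField(String name, String currentFieldName) {
        // before: dynamic values were concatenated into the message by hand
        //   throw new ElasticsearchParseException(
        //           "failed to parse repository [" + name + "], unknown field [" + currentFieldName + "]");

        // after: the message is a template; the constructor runs it through
        // LoggerMessageFormat.format, which substitutes each {} in order, and the
        // [{}] convention keeps dynamic arguments consistently bracketed
        throw new ElasticsearchParseException("failed to parse repository [{}], unknown field [{}]", name, currentFieldName);
    }

    public static void main(String[] args) {
        // the same formatter the logging infrastructure uses can be called directly
        String msg = LoggerMessageFormat.format("unable to parse low watermark [{}]", "85%");
        System.out.println(msg); // prints: unable to parse low watermark [85%]
    }
}
```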
@@ -24,9 +24,10 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper;
+import org.elasticsearch.common.logging.support.LoggerMessageFormat;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.HasRestHeaders;
@@ -48,21 +49,29 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     /**
      * Construct a <code>ElasticsearchException</code> with the specified detail message.
      *
+     * The message can be parameterized using {@code {}} as placeholders for the given
+     * arguments
+     *
      * @param msg the detail message
+     * @param args the arguments for the message
      */
-    public ElasticsearchException(String msg) {
-        super(msg);
+    public ElasticsearchException(String msg, Object... args) {
+        super(LoggerMessageFormat.format(msg, args));
     }

     /**
      * Construct a <code>ElasticsearchException</code> with the specified detail message
      * and nested exception.
      *
+     * The message can be parameterized using {@code {}} as placeholders for the given
+     * arguments
+     *
      * @param msg the detail message
      * @param cause the nested exception
+     * @param args the arguments for the message
      */
-    public ElasticsearchException(String msg, Throwable cause) {
-        super(msg, cause);
+    public ElasticsearchException(String msg, Throwable cause, Object... args) {
+        super(LoggerMessageFormat.format(msg, args), cause);
     }

     public ElasticsearchException(StreamInput in) throws IOException {
@@ -29,12 +29,12 @@ import java.io.IOException;
  */
 public class ElasticsearchParseException extends ElasticsearchException {

-    public ElasticsearchParseException(String msg) {
-        super(msg);
+    public ElasticsearchParseException(String msg, Object... args) {
+        super(msg, args);
     }

-    public ElasticsearchParseException(String msg, Throwable cause) {
-        super(msg, cause);
+    public ElasticsearchParseException(String msg, Throwable cause, Object... args) {
+        super(msg, cause, args);
     }

     public ElasticsearchParseException(StreamInput in) throws IOException {
@@ -33,11 +33,11 @@ public class ElasticsearchTimeoutException extends ElasticsearchException {
         super(in);
     }

-    public ElasticsearchTimeoutException(String message) {
+    public ElasticsearchTimeoutException(String message, Object... args) {
         super(message);
     }

-    public ElasticsearchTimeoutException(String message, Throwable cause) {
-        super(message, cause);
+    public ElasticsearchTimeoutException(String message, Throwable cause, Object... args) {
+        super(message, cause, args);
     }
 }
@@ -20,7 +20,6 @@
 package org.elasticsearch.action.admin.cluster.reroute;

 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
@@ -102,13 +101,13 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
                 if ("commands".equals(currentFieldName)) {
                     this.commands = AllocationCommands.fromXContent(parser);
                 } else {
-                    throw new ElasticsearchParseException("failed to parse reroute request, got start array with wrong field name [" + currentFieldName + "]");
+                    throw new ElasticsearchParseException("failed to parse reroute request, got start array with wrong field name [{}]", currentFieldName);
                 }
             } else if (token.isValue()) {
                 if ("dry_run".equals(currentFieldName) || "dryRun".equals(currentFieldName)) {
                     dryRun = parser.booleanValue();
                 } else {
-                    throw new ElasticsearchParseException("failed to parse reroute request, got value with wrong field name [" + currentFieldName + "]");
+                    throw new ElasticsearchParseException("failed to parse reroute request, got value with wrong field name [{}]", currentFieldName);
                 }
             }
         }
@@ -403,7 +403,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
                 try {
                     customs.put(name, proto.fromMap((Map<String, Object>) entry.getValue()));
                 } catch (IOException e) {
-                    throw new ElasticsearchParseException("failed to parse custom metadata for [" + name + "]");
+                    throw new ElasticsearchParseException("failed to parse custom metadata for [{}]", name);
                 }
             }
         }
@@ -296,7 +296,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
                 try {
                     customs.put(name, proto.fromMap((Map<String, Object>) entry.getValue()));
                 } catch (IOException e) {
-                    throw new ElasticsearchParseException("failed to parse custom metadata for [" + name + "]");
+                    throw new ElasticsearchParseException("failed to parse custom metadata for [{}]", name);
                 }
             }
         }
@@ -418,7 +418,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    fetchSourceContext = new FetchSourceContext(new String[]{parser.text()});
                } else {
-                   throw new ElasticsearchParseException("illegal type for _source: [" + token + "]");
+                   throw new ElasticsearchParseException("illegal type for _source: [{}]", token);
                }
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
@@ -447,7 +447,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
                } else if ("excludes".equals(currentFieldName) || "exclude".equals(currentFieldName)) {
                    currentList = excludes != null ? excludes : (excludes = new ArrayList<>(2));
                } else {
-                   throw new ElasticsearchParseException("Source definition may not contain " + parser.text());
+                   throw new ElasticsearchParseException("source definition may not contain [{}]", parser.text());
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@@ -136,7 +136,7 @@ public class MultiPercolateRequest extends ActionRequest<MultiPercolateRequest>
                    percolateRequest.onlyCount(true);
                    parsePercolateAction(parser, percolateRequest, allowExplicitIndex);
                } else {
-                   throw new ElasticsearchParseException(percolateAction + " isn't a supported percolate operation");
+                   throw new ElasticsearchParseException("[{}] isn't a supported percolate operation", percolateAction);
                }
            }
        }
@@ -111,18 +111,16 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
                        ids.add(parser.text());
                    }
                } else {
-                   throw new ElasticsearchParseException(
-                           "No parameter named " + currentFieldName + "and type ARRAY");
+                   throw new ElasticsearchParseException("no parameter named [{}] and type ARRAY", currentFieldName);
                }
            } else if (token == XContentParser.Token.START_OBJECT && currentFieldName != null) {
                if ("parameters".equals(currentFieldName)) {
                    TermVectorsRequest.parseRequest(template, parser);
                } else {
-                   throw new ElasticsearchParseException(
-                           "No parameter named " + currentFieldName + "and type OBJECT");
+                   throw new ElasticsearchParseException("no parameter named [{}] and type OBJECT", currentFieldName);
                }
            } else if (currentFieldName != null) {
-               throw new ElasticsearchParseException("_mtermvectors: Parameter " + currentFieldName + "not supported");
+               throw new ElasticsearchParseException("_mtermvectors: Parameter [{}] not supported", currentFieldName);
            }
        }
    }
@@ -588,8 +588,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
                        fields.add(parser.text());
                    }
                } else {
-                   throw new ElasticsearchParseException(
-                           "The parameter fields must be given as an array! Use syntax : \"fields\" : [\"field1\", \"field2\",...]");
+                   throw new ElasticsearchParseException("failed to parse term vectors request. field [fields] must be an array");
                }
            } else if (currentFieldName.equals("offsets")) {
                termVectorsRequest.offsets(parser.booleanValue());
@@ -613,12 +612,12 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
                termVectorsRequest.type = parser.text();
            } else if ("_id".equals(currentFieldName)) {
                if (termVectorsRequest.doc != null) {
-                   throw new ElasticsearchParseException("Either \"id\" or \"doc\" can be specified, but not both!");
+                   throw new ElasticsearchParseException("failed to parse term vectors request. either [id] or [doc] can be specified, but not both!");
                }
                termVectorsRequest.id = parser.text();
            } else if ("doc".equals(currentFieldName)) {
                if (termVectorsRequest.id != null) {
-                   throw new ElasticsearchParseException("Either \"id\" or \"doc\" can be specified, but not both!");
+                   throw new ElasticsearchParseException("failed to parse term vectors request. either [id] or [doc] can be specified, but not both!");
                }
                termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
            } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
@@ -628,8 +627,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
            } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
                termVectorsRequest.versionType = VersionType.fromString(parser.text());
            } else {
-               throw new ElasticsearchParseException("The parameter " + currentFieldName
-                       + " is not valid for term vector request!");
+               throw new ElasticsearchParseException("failed to parse term vectors request. unknown field [{}]", currentFieldName);
            }
        }
    }
@@ -645,8 +643,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
            if (e.getValue() instanceof String) {
                mapStrStr.put(e.getKey(), (String) e.getValue());
            } else {
-               throw new ElasticsearchException(
-                       "The analyzer at " + e.getKey() + " should be of type String, but got a " + e.getValue().getClass() + "!");
+               throw new ElasticsearchException("expecting the analyzer at [{}] to be a String, but found [{}] instead", e.getKey(), e.getValue().getClass());
            }
        }
        return mapStrStr;
@@ -675,8 +672,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
            } else if (currentFieldName.equals("max_word_length")) {
                settings.maxWordLength = parser.intValue();
            } else {
-               throw new ElasticsearchParseException("The parameter " + currentFieldName
-                       + " is not valid for filter parameter for term vector request!");
+               throw new ElasticsearchParseException("failed to parse term vectors request. the field [{}] is not valid for filter parameter for term vector request", currentFieldName);
            }
        }
    }
@@ -139,7 +139,7 @@ public class RepositoriesMetaData extends AbstractDiffable<Custom> implements Me
            if (token == XContentParser.Token.FIELD_NAME) {
                String name = parser.currentName();
                if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
-                   throw new ElasticsearchParseException("failed to parse repository [" + name + "], expected object");
+                   throw new ElasticsearchParseException("failed to parse repository [{}], expected object", name);
                }
                String type = null;
                Settings settings = Settings.EMPTY;
@@ -148,23 +148,23 @@ public class RepositoriesMetaData extends AbstractDiffable<Custom> implements Me
                        String currentFieldName = parser.currentName();
                        if ("type".equals(currentFieldName)) {
                            if (parser.nextToken() != XContentParser.Token.VALUE_STRING) {
-                               throw new ElasticsearchParseException("failed to parse repository [" + name + "], unknown type");
+                               throw new ElasticsearchParseException("failed to parse repository [{}], unknown type", name);
                            }
                            type = parser.text();
                        } else if ("settings".equals(currentFieldName)) {
                            if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
-                               throw new ElasticsearchParseException("failed to parse repository [" + name + "], incompatible params");
+                               throw new ElasticsearchParseException("failed to parse repository [{}], incompatible params", name);
                            }
                            settings = Settings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).build();
                        } else {
-                           throw new ElasticsearchParseException("failed to parse repository [" + name + "], unknown field [" + currentFieldName + "]");
+                           throw new ElasticsearchParseException("failed to parse repository [{}], unknown field [{}]", name, currentFieldName);
                        }
                    } else {
-                       throw new ElasticsearchParseException("failed to parse repository [" + name + "]");
+                       throw new ElasticsearchParseException("failed to parse repository [{}]", name);
                    }
                }
                if (type == null) {
-                   throw new ElasticsearchParseException("failed to parse repository [" + name + "], missing repository type");
+                   throw new ElasticsearchParseException("failed to parse repository [{}], missing repository type", name);
                }
                repository.add(new RepositoryMetaData(name, type, settings));
            } else {
@@ -81,20 +81,20 @@ public class AllocateAllocationCommand implements AllocationCommand {
                } else if ("allow_primary".equals(currentFieldName) || "allowPrimary".equals(currentFieldName)) {
                    allowPrimary = parser.booleanValue();
                } else {
-                   throw new ElasticsearchParseException("[allocate] command does not support field [" + currentFieldName + "]");
+                   throw new ElasticsearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName);
                }
            } else {
-               throw new ElasticsearchParseException("[allocate] command does not support complex json tokens [" + token + "]");
+               throw new ElasticsearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token);
            }
        }
        if (index == null) {
-           throw new ElasticsearchParseException("[allocate] command missing the index parameter");
+           throw new ElasticsearchParseException("[{}] command missing the index parameter", NAME);
        }
        if (shardId == -1) {
-           throw new ElasticsearchParseException("[allocate] command missing the shard parameter");
+           throw new ElasticsearchParseException("[{}] command missing the shard parameter", NAME);
        }
        if (nodeId == null) {
-           throw new ElasticsearchParseException("[allocate] command missing the node parameter");
+           throw new ElasticsearchParseException("[{}] command missing the node parameter", NAME);
        }
        return new AllocateAllocationCommand(new ShardId(index, shardId), nodeId, allowPrimary);
    }
@@ -173,10 +173,10 @@ public class AllocationCommands {
        }
        if (token == XContentParser.Token.FIELD_NAME) {
            if (!parser.currentName().equals("commands")) {
-               throw new ElasticsearchParseException("expected field name to be named `commands`, got " + parser.currentName());
+               throw new ElasticsearchParseException("expected field name to be named [commands], got [{}] instead", parser.currentName());
            }
            if (!parser.currentName().equals("commands")) {
-               throw new ElasticsearchParseException("expected field name to be named `commands`, got " + parser.currentName());
+               throw new ElasticsearchParseException("expected field name to be named [commands], got [{}] instead", parser.currentName());
            }
            token = parser.nextToken();
            if (token != XContentParser.Token.START_ARRAY) {
@@ -185,7 +185,7 @@ public class AllocationCommands {
        } else if (token == XContentParser.Token.START_ARRAY) {
            // ok...
        } else {
-           throw new ElasticsearchParseException("expected either field name commands, or start array, got " + token);
+           throw new ElasticsearchParseException("expected either field name [commands], or start array, got [{}] instead", token);
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            if (token == XContentParser.Token.START_OBJECT) {
@@ -196,10 +196,10 @@ public class AllocationCommands {
                commands.add(AllocationCommands.lookupFactorySafe(commandName).fromXContent(parser));
                // move to the end object one
                if (parser.nextToken() != XContentParser.Token.END_OBJECT) {
-                   throw new ElasticsearchParseException("allocation command is malformed, done parsing a command, but didn't get END_OBJECT, got " + token);
+                   throw new ElasticsearchParseException("allocation command is malformed, done parsing a command, but didn't get END_OBJECT, got [{}] instead", token);
                }
            } else {
-               throw new ElasticsearchParseException("allocation command is malformed, got token " + token);
+               throw new ElasticsearchParseException("allocation command is malformed, got [{}] instead", token);
            }
        }
        return commands;
@@ -83,20 +83,20 @@ public class CancelAllocationCommand implements AllocationCommand {
                } else if ("allow_primary".equals(currentFieldName) || "allowPrimary".equals(currentFieldName)) {
                    allowPrimary = parser.booleanValue();
                } else {
-                   throw new ElasticsearchParseException("[cancel] command does not support field [" + currentFieldName + "]");
+                   throw new ElasticsearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName);
                }
            } else {
-               throw new ElasticsearchParseException("[cancel] command does not support complex json tokens [" + token + "]");
+               throw new ElasticsearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token);
            }
        }
        if (index == null) {
-           throw new ElasticsearchParseException("[cancel] command missing the index parameter");
+           throw new ElasticsearchParseException("[{}] command missing the index parameter", NAME);
        }
        if (shardId == -1) {
-           throw new ElasticsearchParseException("[cancel] command missing the shard parameter");
+           throw new ElasticsearchParseException("[{}] command missing the shard parameter", NAME);
        }
        if (nodeId == null) {
-           throw new ElasticsearchParseException("[cancel] command missing the node parameter");
+           throw new ElasticsearchParseException("[{}] command missing the node parameter", NAME);
        }
        return new CancelAllocationCommand(new ShardId(index, shardId), nodeId, allowPrimary);
    }
@@ -81,23 +81,23 @@ public class MoveAllocationCommand implements AllocationCommand {
                } else if ("to_node".equals(currentFieldName) || "toNode".equals(currentFieldName)) {
                    toNode = parser.text();
                } else {
-                   throw new ElasticsearchParseException("[move] command does not support field [" + currentFieldName + "]");
+                   throw new ElasticsearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName);
                }
            } else {
-               throw new ElasticsearchParseException("[move] command does not support complex json tokens [" + token + "]");
+               throw new ElasticsearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token);
            }
        }
        if (index == null) {
-           throw new ElasticsearchParseException("[move] command missing the index parameter");
+           throw new ElasticsearchParseException("[{}] command missing the index parameter", NAME);
        }
        if (shardId == -1) {
-           throw new ElasticsearchParseException("[move] command missing the shard parameter");
+           throw new ElasticsearchParseException("[{}] command missing the shard parameter", NAME);
        }
        if (fromNode == null) {
-           throw new ElasticsearchParseException("[move] command missing the from_node parameter");
+           throw new ElasticsearchParseException("[{}] command missing the from_node parameter", NAME);
        }
        if (toNode == null) {
-           throw new ElasticsearchParseException("[move] command missing the to_node parameter");
+           throw new ElasticsearchParseException("[{}] command missing the to_node parameter", NAME);
        }
        return new MoveAllocationCommand(new ShardId(index, shardId), fromNode, toNode);
    }
@@ -102,7 +102,7 @@ public class DiskThresholdDecider extends AllocationDecider {
            }
            if (newLowWatermark != null) {
                if (!validWatermarkSetting(newLowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK)) {
-                   throw new ElasticsearchParseException("Unable to parse low watermark: [" + newLowWatermark + "]");
+                   throw new ElasticsearchParseException("unable to parse low watermark [{}]", newLowWatermark);
                }
                logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, newLowWatermark);
                DiskThresholdDecider.this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(newLowWatermark);
@@ -110,7 +110,7 @@ public class DiskThresholdDecider extends AllocationDecider {
            }
            if (newHighWatermark != null) {
                if (!validWatermarkSetting(newHighWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK)) {
-                   throw new ElasticsearchParseException("Unable to parse high watermark: [" + newHighWatermark + "]");
+                   throw new ElasticsearchParseException("unable to parse high watermark [{}]", newHighWatermark);
                }
                logger.info("updating [{}] to [{}]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, newHighWatermark);
                DiskThresholdDecider.this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(newHighWatermark);
@@ -200,10 +200,10 @@ public class DiskThresholdDecider extends AllocationDecider {
        String highWatermark = settings.get(CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "90%");

        if (!validWatermarkSetting(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK)) {
-           throw new ElasticsearchParseException("Unable to parse low watermark: [" + lowWatermark + "]");
+           throw new ElasticsearchParseException("unable to parse low watermark [{}]", lowWatermark);
        }
        if (!validWatermarkSetting(highWatermark, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK)) {
-           throw new ElasticsearchParseException("Unable to parse high watermark: [" + highWatermark + "]");
+           throw new ElasticsearchParseException("unable to parse high watermark [{}]", highWatermark);
        }
        // Watermark is expressed in terms of used data, but we need "free" data watermark
        this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(lowWatermark);
@@ -309,7 +309,7 @@ public class DiskThresholdDecider extends AllocationDecider {
        double usedDiskPercentage = usage.getUsedDiskAsPercentage();
        long freeBytes = usage.getFreeBytes();
        if (logger.isTraceEnabled()) {
-           logger.trace("Node [{}] has {}% used disk", node.nodeId(), usedDiskPercentage);
+           logger.trace("node [{}] has {}% used disk", node.nodeId(), usedDiskPercentage);
        }

        // a flag for whether the primary shard has been previously allocated
@@ -320,7 +320,7 @@ public class DiskThresholdDecider extends AllocationDecider {
            // If the shard is a replica or has a primary that has already been allocated before, check the low threshold
            if (!shardRouting.primary() || (shardRouting.primary() && primaryHasBeenAllocated)) {
                if (logger.isDebugEnabled()) {
-                   logger.debug("Less than the required {} free bytes threshold ({} bytes free) on node {}, preventing allocation",
+                   logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, preventing allocation",
                            freeBytesThresholdLow, freeBytes, node.nodeId());
                }
                return allocation.decision(Decision.NO, NAME, "less than required [%s] free on node, free: [%s]",
@@ -329,7 +329,7 @@ public class DiskThresholdDecider extends AllocationDecider {
                // Allow the shard to be allocated because it is primary that
                // has never been allocated if it's under the high watermark
                if (logger.isDebugEnabled()) {
-                   logger.debug("Less than the required {} free bytes threshold ({} bytes free) on node {}, " +
+                   logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " +
                            "but allowing allocation because primary has never been allocated",
                            freeBytesThresholdLow, freeBytes, node.nodeId());
                }
@@ -338,7 +338,7 @@ public class DiskThresholdDecider extends AllocationDecider {
                // Even though the primary has never been allocated, the node is
                // above the high watermark, so don't allow allocating the shard
                if (logger.isDebugEnabled()) {
-                   logger.debug("Less than the required {} free bytes threshold ({} bytes free) on node {}, " +
+                   logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " +
                            "preventing allocation even though primary has never been allocated",
                            freeBytesThresholdHigh, freeBytes, node.nodeId());
                }
@@ -352,7 +352,7 @@ public class DiskThresholdDecider extends AllocationDecider {
            // If the shard is a replica or has a primary that has already been allocated before, check the low threshold
            if (!shardRouting.primary() || (shardRouting.primary() && primaryHasBeenAllocated)) {
                if (logger.isDebugEnabled()) {
-                   logger.debug("More than the allowed {} used disk threshold ({} used) on node [{}], preventing allocation",
+                   logger.debug("more than the allowed {} used disk threshold ({} used) on node [{}], preventing allocation",
                            Strings.format1Decimals(usedDiskThresholdLow, "%"),
                            Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId());
                }
@@ -362,7 +362,7 @@ public class DiskThresholdDecider extends AllocationDecider {
                // Allow the shard to be allocated because it is primary that
                // has never been allocated if it's under the high watermark
                if (logger.isDebugEnabled()) {
-                   logger.debug("More than the allowed {} used disk threshold ({} used) on node [{}], " +
+                   logger.debug("more than the allowed {} used disk threshold ({} used) on node [{}], " +
                            "but allowing allocation because primary has never been allocated",
                            Strings.format1Decimals(usedDiskThresholdLow, "%"),
                            Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId());
@@ -372,7 +372,7 @@ public class DiskThresholdDecider extends AllocationDecider {
                // Even though the primary has never been allocated, the node is
                // above the high watermark, so don't allow allocating the shard
                if (logger.isDebugEnabled()) {
-                   logger.debug("Less than the required {} free bytes threshold ({} bytes free) on node {}, " +
+                   logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " +
                            "preventing allocation even though primary has never been allocated",
                            Strings.format1Decimals(freeDiskThresholdHigh, "%"),
                            Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId());
@@ -389,13 +389,13 @@ public class DiskThresholdDecider extends AllocationDecider {
        double freeSpaceAfterShard = freeDiskPercentageAfterShardAssigned(usage, shardSize);
        long freeBytesAfterShard = freeBytes - shardSize;
        if (freeBytesAfterShard < freeBytesThresholdHigh.bytes()) {
-           logger.warn("After allocating, node [{}] would have less than the required {} free bytes threshold ({} bytes free), preventing allocation",
+           logger.warn("after allocating, node [{}] would have less than the required {} free bytes threshold ({} bytes free), preventing allocation",
                    node.nodeId(), freeBytesThresholdHigh, freeBytesAfterShard);
            return allocation.decision(Decision.NO, NAME, "after allocation less than required [%s] free on node, free: [%s]",
                    freeBytesThresholdLow, new ByteSizeValue(freeBytesAfterShard));
        }
        if (freeSpaceAfterShard < freeDiskThresholdHigh) {
-           logger.warn("After allocating, node [{}] would have more than the allowed {} free disk threshold ({} free), preventing allocation",
+           logger.warn("after allocating, node [{}] would have more than the allowed {} free disk threshold ({} free), preventing allocation",
                    node.nodeId(), Strings.format1Decimals(freeDiskThresholdHigh, "%"), Strings.format1Decimals(freeSpaceAfterShard, "%"));
            return allocation.decision(Decision.NO, NAME, "after allocation more than allowed [%s%%] used disk on node, free: [%s%%]",
                    usedDiskThresholdLow, freeSpaceAfterShard);
@@ -415,11 +415,11 @@ public class DiskThresholdDecider extends AllocationDecider {
        double freeDiskPercentage = usage.getFreeDiskAsPercentage();
        long freeBytes = usage.getFreeBytes();
        if (logger.isDebugEnabled()) {
-           logger.debug("Node [{}] has {}% free disk ({} bytes)", node.nodeId(), freeDiskPercentage, freeBytes);
+           logger.debug("node [{}] has {}% free disk ({} bytes)", node.nodeId(), freeDiskPercentage, freeBytes);
        }
        if (freeBytes < freeBytesThresholdHigh.bytes()) {
            if (logger.isDebugEnabled()) {
-               logger.debug("Less than the required {} free bytes threshold ({} bytes free) on node {}, shard cannot remain",
+               logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, shard cannot remain",
                        freeBytesThresholdHigh, freeBytes, node.nodeId());
            }
            return allocation.decision(Decision.NO, NAME, "after allocation less than required [%s] free on node, free: [%s]",
@@ -427,7 +427,7 @@ public class DiskThresholdDecider extends AllocationDecider {
        }
        if (freeDiskPercentage < freeDiskThresholdHigh) {
            if (logger.isDebugEnabled()) {
-               logger.debug("Less than the required {}% free disk threshold ({}% free) on node {}, shard cannot remain",
+               logger.debug("less than the required {}% free disk threshold ({}% free) on node {}, shard cannot remain",
                        freeDiskThresholdHigh, freeDiskPercentage, node.nodeId());
            }
            return allocation.decision(Decision.NO, NAME, "after allocation less than required [%s%%] free disk on node, free: [%s%%]",
@@ -446,7 +446,7 @@ public class DiskThresholdDecider extends AllocationDecider {
            // use the average usage for all nodes as the usage for this node
            usage = averageUsage(node, usages);
            if (logger.isDebugEnabled()) {
-               logger.debug("Unable to determine disk usage for {}, defaulting to average across nodes [{} total] [{} free] [{}% free]",
+               logger.debug("unable to determine disk usage for {}, defaulting to average across nodes [{} total] [{} free] [{}% free]",
                        node.nodeId(), usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeDiskAsPercentage());
            }
        }
@@ -552,7 +552,7 @@ public class DiskThresholdDecider extends AllocationDecider {
        // Allow allocation regardless if only a single node is available
        if (allocation.nodes().size() <= 1) {
            if (logger.isTraceEnabled()) {
-               logger.trace("Only a single node is present, allowing allocation");
+               logger.trace("only a single node is present, allowing allocation");
            }
            return allocation.decision(Decision.YES, NAME, "only a single node is present");
        }
@@ -561,7 +561,7 @@ public class DiskThresholdDecider extends AllocationDecider {
        final ClusterInfo clusterInfo = allocation.clusterInfo();
        if (clusterInfo == null) {
            if (logger.isTraceEnabled()) {
-               logger.trace("Cluster info unavailable for disk threshold decider, allowing allocation.");
+               logger.trace("cluster info unavailable for disk threshold decider, allowing allocation.");
            }
            return allocation.decision(Decision.YES, NAME, "cluster info unavailable");
        }
@@ -570,7 +570,7 @@ public class DiskThresholdDecider extends AllocationDecider {
        // Fail open if there are no disk usages available
        if (usages.isEmpty()) {
            if (logger.isTraceEnabled()) {
-               logger.trace("Unable to determine disk usages for disk-aware allocation, allowing allocation");
+               logger.trace("unable to determine disk usages for disk-aware allocation, allowing allocation");
            }
            return allocation.decision(Decision.YES, NAME, "disk usages unavailable");
        }
@@ -369,10 +369,10 @@ public class GeoUtils {
                    throw new ElasticsearchParseException("geohash must be a string");
                }
            } else {
-               throw new ElasticsearchParseException("field must be either '" + LATITUDE + "', '" + LONGITUDE + "' or '" + GEOHASH + "'");
+               throw new ElasticsearchParseException("field must be either [{}], [{}] or [{}]", LATITUDE, LONGITUDE, GEOHASH);
            }
        } else {
-           throw new ElasticsearchParseException("Token '"+parser.currentToken()+"' not allowed");
+           throw new ElasticsearchParseException("token [{}] not allowed", parser.currentToken());
        }
    }

@@ -383,9 +383,9 @@ public class GeoUtils {
                return point.resetFromGeoHash(geohash);
            }
        } else if (Double.isNaN(lat)) {
-           throw new ElasticsearchParseException("field [" + LATITUDE + "] missing");
+           throw new ElasticsearchParseException("field [{}] missing", LATITUDE);
        } else if (Double.isNaN(lon)) {
-           throw new ElasticsearchParseException("field [" + LONGITUDE + "] missing");
+           throw new ElasticsearchParseException("field [{}] missing", LONGITUDE);
        } else {
            return point.reset(lat, lon);
        }
@@ -403,7 +403,7 @@ public class GeoUtils {
                    throw new ElasticsearchParseException("only two values allowed");
                }
            } else {
-               throw new ElasticsearchParseException("Numeric value expected");
+               throw new ElasticsearchParseException("numeric value expected");
            }
        }
        return point.reset(lat, lon);
@@ -721,7 +721,7 @@ public abstract class ShapeBuilder implements ToXContent {
        if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
            return null;
        } else if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
-           throw new ElasticsearchParseException("Shape must be an object consisting of type and coordinates");
+           throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
        }

        GeoShapeType shapeType = null;
@@ -758,14 +758,13 @@ public abstract class ShapeBuilder implements ToXContent {
        }

        if (shapeType == null) {
-           throw new ElasticsearchParseException("Shape type not included");
+           throw new ElasticsearchParseException("shape type not included");
        } else if (node == null && GeoShapeType.GEOMETRYCOLLECTION != shapeType) {
-           throw new ElasticsearchParseException("Coordinates not included");
+           throw new ElasticsearchParseException("coordinates not included");
        } else if (geometryCollections == null && GeoShapeType.GEOMETRYCOLLECTION == shapeType) {
            throw new ElasticsearchParseException("geometries not included");
        } else if (radius != null && GeoShapeType.CIRCLE != shapeType) {
-           throw new ElasticsearchParseException("Field [" + CircleBuilder.FIELD_RADIUS + "] is supported for [" + CircleBuilder.TYPE
-                   + "] only");
+           throw new ElasticsearchParseException("field [{}] is supported for [{}] only", CircleBuilder.FIELD_RADIUS, CircleBuilder.TYPE);
        }

        switch (shapeType) {
@@ -779,14 +778,13 @@ public abstract class ShapeBuilder implements ToXContent {
            case ENVELOPE: return parseEnvelope(node, requestedOrientation);
            case GEOMETRYCOLLECTION: return geometryCollections;
            default:
-               throw new ElasticsearchParseException("Shape type [" + shapeType + "] not included");
+               throw new ElasticsearchParseException("shape type [{}] not included", shapeType);
        }
    }

    protected static void validatePointNode(CoordinateNode node) {
        if (node.isEmpty()) {
-           throw new ElasticsearchParseException("Invalid number of points (0) provided when expecting a single coordinate "
-                   + "([lat, lng])");
+           throw new ElasticsearchParseException("invalid number of points (0) provided when expecting a single coordinate ([lat, lng])");
        } else if (node.coordinate == null) {
            if (node.children.isEmpty() == false) {
                throw new ElasticsearchParseException("multipoint data provided when single point data expected.");
@@ -806,8 +804,8 @@ public abstract class ShapeBuilder implements ToXContent {
    protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates, Orientation orientation) {
        // validate the coordinate array for envelope type
        if (coordinates.children.size() != 2) {
-           throw new ElasticsearchParseException("Invalid number of points (" + coordinates.children.size() + ") provided for " +
-                   "geo_shape ('envelope') when expecting an array of 2 coordinates");
+           throw new ElasticsearchParseException("invalid number of points [{}] provided for " +
+                   "geo_shape [{}] when expecting an array of 2 coordinates", coordinates.children.size(), GeoShapeType.ENVELOPE.shapename);
        }
        // verify coordinate bounds, correct if necessary
        Coordinate uL = coordinates.children.get(0).coordinate;
@@ -826,7 +824,7 @@ public abstract class ShapeBuilder implements ToXContent {
                throw new ElasticsearchParseException("single coordinate found when expecting an array of " +
                        "coordinates. change type to point or change data to an array of >0 coordinates");
            }
-           throw new ElasticsearchParseException("No data provided for multipoint object when expecting " +
+           throw new ElasticsearchParseException("no data provided for multipoint object when expecting " +
                    ">0 points (e.g., [[lat, lng]] or [[lat, lng], ...])");
        } else {
            for (CoordinateNode point : coordinates.children) {
@@ -852,8 +850,7 @@ public abstract class ShapeBuilder implements ToXContent {
         * LineStringBuilder should throw a graceful exception if < 2 coordinates/points are provided
         */
        if (coordinates.children.size() < 2) {
-           throw new ElasticsearchParseException("Invalid number of points in LineString (found " +
-                   coordinates.children.size() + " - must be >= 2)");
+           throw new ElasticsearchParseException("invalid number of points in LineString (found [{}] - must be >= 2)", coordinates.children.size());
        }

        LineStringBuilder line = newLineString();
@@ -884,19 +881,17 @@ public abstract class ShapeBuilder implements ToXContent {
                    " No coordinate array provided" : " Found a single coordinate when expecting a coordinate array";
            throw new ElasticsearchParseException(error);
        } else if (coordinates.children.size() < 4) {
-           throw new ElasticsearchParseException("Invalid number of points in LinearRing (found " +
-                   coordinates.children.size() + " - must be >= 4)");
+           throw new ElasticsearchParseException("invalid number of points in LinearRing (found [{}] - must be >= 4)", coordinates.children.size());
        } else if (!coordinates.children.get(0).coordinate.equals(
                coordinates.children.get(coordinates.children.size() - 1).coordinate)) {
-           throw new ElasticsearchParseException("Invalid LinearRing found (coordinates are not closed)");
+           throw new ElasticsearchParseException("invalid LinearRing found (coordinates are not closed)");
        }
        return parseLineString(coordinates);
    }

    protected static PolygonBuilder parsePolygon(CoordinateNode coordinates, Orientation orientation) {
        if (coordinates.children == null || coordinates.children.isEmpty()) {
-           throw new ElasticsearchParseException("Invalid LinearRing provided for type polygon. Linear ring must be an array of " +
-                   "coordinates");
+           throw new ElasticsearchParseException("invalid LinearRing provided for type polygon. Linear ring must be an array of coordinates");
        }

        LineStringBuilder shell = parseLinearRing(coordinates.children.get(0));
@@ -924,7 +919,7 @@ public abstract class ShapeBuilder implements ToXContent {
     */
    protected static GeometryCollectionBuilder parseGeometries(XContentParser parser, Orientation orientation) throws IOException {
        if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
-           throw new ElasticsearchParseException("Geometries must be an array of geojson objects");
+           throw new ElasticsearchParseException("geometries must be an array of geojson objects");
        }

        XContentParser.Token token = parser.nextToken();
@@ -58,7 +58,7 @@ public class DateMathParser {
 try {
 time = now.call();
 } catch (Exception e) {
-throw new ElasticsearchParseException("Could not read the current timestamp", e);
+throw new ElasticsearchParseException("could not read the current timestamp", e);
 }
 mathString = text.substring("now".length());
 } else {
@@ -95,12 +95,12 @@ public class DateMathParser {
 } else if (c == '-') {
 sign = -1;
 } else {
-throw new ElasticsearchParseException("operator not supported for date math [" + mathString + "]");
+throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString);
 }
 }

 if (i >= mathString.length()) {
-throw new ElasticsearchParseException("truncated date math [" + mathString + "]");
+throw new ElasticsearchParseException("truncated date math [{}]", mathString);
 }

 final int num;
@@ -112,13 +112,13 @@ public class DateMathParser {
 i++;
 }
 if (i >= mathString.length()) {
-throw new ElasticsearchParseException("truncated date math [" + mathString + "]");
+throw new ElasticsearchParseException("truncated date math [{}]", mathString);
 }
 num = Integer.parseInt(mathString.substring(numFrom, i));
 }
 if (round) {
 if (num != 1) {
-throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [" + mathString + "]");
+throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString);
 }
 }
 char unit = mathString.charAt(i++);
@@ -175,7 +175,7 @@ public class DateMathParser {
 }
 break;
 default:
-throw new ElasticsearchParseException("unit [" + unit + "] not supported for date math [" + mathString + "]");
+throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString);
 }
 if (propertyToRound != null) {
 if (roundUp) {
@@ -200,7 +200,7 @@ public class DateMathParser {
 return parser.parseMillis(value);
 } catch (IllegalArgumentException e) {

-throw new ElasticsearchParseException("failed to parse date field [" + value + "] with format [" + dateTimeFormatter.format() + "]", e);
+throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]", e, value, dateTimeFormatter.format());
 }
 }

@@ -62,7 +62,7 @@ public abstract class XContentSettingsLoader implements SettingsLoader {
 return settings;
 }
 if (token != XContentParser.Token.START_OBJECT) {
-throw new ElasticsearchParseException("malformed, expected settings to start with 'object', instead was [" + token + "]");
+throw new ElasticsearchParseException("malformed, expected settings to start with 'object', instead was [{}]", token);
 }
 serializeObject(settings, sb, path, jp, null);
 return settings;
@@ -214,14 +214,14 @@ public class ByteSizeValue implements Streamable {
 } else {
 // Missing units:
 if (Settings.getSettingsRequireUnits()) {
-throw new ElasticsearchParseException("Failed to parse setting [" + settingName + "] with value [" + sValue + "] as a size in bytes: unit is missing or unrecognized") ;
+throw new ElasticsearchParseException("failed to parse setting [{}] with value [{}] as a size in bytes: unit is missing or unrecognized", settingName, sValue);
 } else {
 // Leniency default to bytes:
 bytes = Long.parseLong(sValue);
 }
 }
 } catch (NumberFormatException e) {
-throw new ElasticsearchParseException("Failed to parse [" + sValue + "]", e);
+throw new ElasticsearchParseException("failed to parse [{}]", e, sValue);
 }
 return new ByteSizeValue(bytes, ByteSizeUnit.BYTES);
 }
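For illustration only, and not part of this commit: a minimal sketch of how a parameterized parse error with a cause is expected to behave, assuming the varargs constructor used above (message template, cause, then format arguments) and the LoggerMessageFormat-style {} substitution. The literal "10kb" is just an example input.

import org.elasticsearch.ElasticsearchParseException;

public class ParameterizedMessageSketch {
    public static void main(String[] args) {
        try {
            Long.parseLong("10kb"); // fails: not a plain number
        } catch (NumberFormatException e) {
            ElasticsearchParseException ex =
                    new ElasticsearchParseException("failed to parse [{}]", e, "10kb");
            // the {} placeholder should be filled with the argument, wrapped by the
            // [ and ] already present in the message template
            System.out.println(ex.getMessage()); // expected: failed to parse [10kb]
            System.out.println(ex.getCause());   // the original NumberFormatException
        }
    }
}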
@@ -40,11 +40,11 @@ public enum MemorySizeValue {
 try {
 final double percent = Double.parseDouble(percentAsString);
 if (percent < 0 || percent > 100) {
-throw new ElasticsearchParseException("Percentage should be in [0-100], got " + percentAsString);
+throw new ElasticsearchParseException("percentage should be in [0-100], got [{}]", percentAsString);
 }
 return new ByteSizeValue((long) ((percent / 100) * JvmInfo.jvmInfo().getMem().getHeapMax().bytes()), ByteSizeUnit.BYTES);
 } catch (NumberFormatException e) {
-throw new ElasticsearchParseException("Failed to parse [" + percentAsString + "] as a double", e);
+throw new ElasticsearchParseException("failed to parse [{}] as a double", e, percentAsString);
 }
 } else {
 return parseBytesSizeValue(sValue, settingName);
@@ -55,21 +55,21 @@ public class RatioValue {
 try {
 final double percent = Double.parseDouble(percentAsString);
 if (percent < 0 || percent > 100) {
-throw new ElasticsearchParseException("Percentage should be in [0-100], got " + percentAsString);
+throw new ElasticsearchParseException("Percentage should be in [0-100], got [{}]", percentAsString);
 }
 return new RatioValue(Math.abs(percent));
 } catch (NumberFormatException e) {
-throw new ElasticsearchParseException("Failed to parse [" + percentAsString + "] as a double", e);
+throw new ElasticsearchParseException("Failed to parse [{}] as a double", e, percentAsString);
 }
 } else {
 try {
 double ratio = Double.parseDouble(sValue);
 if (ratio < 0 || ratio > 1.0) {
-throw new ElasticsearchParseException("Ratio should be in [0-1.0], got " + ratio);
+throw new ElasticsearchParseException("Ratio should be in [0-1.0], got [{}]", ratio);
 }
 return new RatioValue(100.0 * Math.abs(ratio));
 } catch (NumberFormatException e) {
-throw new ElasticsearchParseException("Invalid ratio or percentage: [" + sValue + "]");
+throw new ElasticsearchParseException("Invalid ratio or percentage [{}]", sValue);
 }

 }
@@ -190,7 +190,7 @@ public class SizeValue implements Streamable {
 singles = Long.parseLong(sValue);
 }
 } catch (NumberFormatException e) {
-throw new ElasticsearchParseException("Failed to parse [" + sValue + "]", e);
+throw new ElasticsearchParseException("failed to parse [{}]", e, sValue);
 }
 return new SizeValue(singles, SizeUnit.SINGLE);
 }
@@ -259,7 +259,7 @@ public class TimeValue implements Streamable {
 } else {
 if (Settings.getSettingsRequireUnits()) {
 // Missing units:
-throw new ElasticsearchParseException("Failed to parse setting [" + settingName + "] with value [" + sValue + "] as a time value: unit is missing or unrecognized");
+throw new ElasticsearchParseException("Failed to parse setting [{}] with value [{}] as a time value: unit is missing or unrecognized", settingName, sValue);
 } else {
 // Leniency default to msec for bwc:
 millis = Long.parseLong(sValue);
@@ -267,7 +267,7 @@ public class TimeValue implements Streamable {
 }
 return new TimeValue(millis, TimeUnit.MILLISECONDS);
 } catch (NumberFormatException e) {
-throw new ElasticsearchParseException("Failed to parse [" + sValue + "]", e);
+throw new ElasticsearchParseException("Failed to parse [{}]", e, sValue);
 }
 }

@@ -33,11 +33,11 @@ public class IndexException extends ElasticsearchException {

 private final Index index;

-public IndexException(Index index, String msg) {
-this(index, msg, null);
+public IndexException(Index index, String msg, Object... args) {
+this(index, msg, null, args);
 }

-public IndexException(Index index, String msg, Throwable cause) {
+public IndexException(Index index, String msg, Throwable cause, Object... args) {
 super(msg, cause);
 this.index = index;
 }
@@ -355,7 +355,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
 String name = parser.text();
 ContextMapping mapping = fieldType().getContextMapping().get(name);
 if (mapping == null) {
-throw new ElasticsearchParseException("context [" + name + "] is not defined");
+throw new ElasticsearchParseException("context [{}] is not defined", name);
 } else {
 token = parser.nextToken();
 configs.put(name, mapping.parseContext(context, parser));
@@ -87,12 +87,12 @@ public class TypeParsers {
 firstType = type;
 }
 } else {
-throw new MapperParsingException("No type specified for property [" + propName + "]");
+throw new MapperParsingException("no type specified for property [" + propName + "]");
 }

 Mapper.TypeParser typeParser = parserContext.typeParser(type);
 if (typeParser == null) {
-throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + fieldName + "]");
+throw new MapperParsingException("no handler for type [" + type + "] declared on field [" + fieldName + "]");
 }
 if (propName.equals(name)) {
 mainFieldBuilder = (AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
@@ -256,14 +256,14 @@ public class TypeParsers {

 NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
 if (analyzer == null) {
-throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
+throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
 }
 indexAnalyzer = analyzer;
 iterator.remove();
 } else if (propName.equals("search_analyzer")) {
 NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
 if (analyzer == null) {
-throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
+throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
 }
 searchAnalyzer = analyzer;
 iterator.remove();
@@ -313,14 +313,14 @@ public class TypeParsers {
 } else if (propNode instanceof Map) {
 multiFieldsPropNodes = (Map<String, Object>) propNode;
 } else {
-throw new MapperParsingException("Expected map for property [fields] on field [" + propNode + "] or " +
+throw new MapperParsingException("expected map for property [fields] on field [" + propNode + "] or " +
 "[" + propName + "] but got a " + propNode.getClass());
 }

 for (Map.Entry<String, Object> multiFieldEntry : multiFieldsPropNodes.entrySet()) {
 String multiFieldName = multiFieldEntry.getKey();
 if (!(multiFieldEntry.getValue() instanceof Map)) {
-throw new MapperParsingException("Illegal field [" + multiFieldName + "], only fields can be specified inside fields");
+throw new MapperParsingException("illegal field [" + multiFieldName + "], only fields can be specified inside fields");
 }
 @SuppressWarnings("unchecked")
 Map<String, Object> multiFieldNodes = (Map<String, Object>) multiFieldEntry.getValue();
@@ -330,7 +330,7 @@ public class TypeParsers {
 if (typeNode != null) {
 type = typeNode.toString();
 } else {
-throw new MapperParsingException("No type specified for property [" + multiFieldName + "]");
+throw new MapperParsingException("no type specified for property [" + multiFieldName + "]");
 }
 if (type.equals(ObjectMapper.CONTENT_TYPE) || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)) {
 throw new MapperParsingException("Type [" + type + "] cannot be used in multi field");
@@ -338,7 +338,7 @@ public class TypeParsers {

 Mapper.TypeParser typeParser = parserContext.typeParser(type);
 if (typeParser == null) {
-throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + multiFieldName + "]");
+throw new MapperParsingException("no handler for type [" + type + "] declared on field [" + multiFieldName + "]");
 }
 builder.addMultiField(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
 multiFieldNodes.remove("type");
@@ -360,7 +360,7 @@ public class TypeParsers {
 } else if (INDEX_OPTIONS_DOCS.equalsIgnoreCase(value)) {
 return IndexOptions.DOCS;
 } else {
-throw new ElasticsearchParseException("Failed to parse index option [" + value + "]");
+throw new ElasticsearchParseException("failed to parse index option [{}]", value);
 }
 }

@@ -389,7 +389,7 @@ public class TypeParsers {
 builder.storeTermVectorOffsets(true);
 builder.storeTermVectorPayloads(true);
 } else {
-throw new MapperParsingException("Wrong value for termVector [" + termVector + "] for field [" + fieldName + "]");
+throw new MapperParsingException("wrong value for termVector [" + termVector + "] for field [" + fieldName + "]");
 }
 }

@@ -404,7 +404,7 @@ public class TypeParsers {
 builder.index(true);
 builder.tokenized(true);
 } else {
-throw new MapperParsingException("Wrong value for index [" + index + "] for field [" + fieldName + "]");
+throw new MapperParsingException("wrong value for index [" + index + "] for field [" + fieldName + "]");
 }
 }

@@ -425,7 +425,7 @@ public class TypeParsers {
 } else if ("full".equals(path)) {
 return ContentPath.Type.FULL;
 } else {
-throw new MapperParsingException("Wrong value for pathType [" + path + "] for object [" + name + "]");
+throw new MapperParsingException("wrong value for pathType [" + path + "] for object [" + name + "]");
 }
 }

@@ -39,6 +39,8 @@ import java.io.IOException;
 */
 public class GeoBoundingBoxQueryParser implements QueryParser {

+public static final String NAME = "geo_bbox";
+
 public static final String TOP = "top";
 public static final String LEFT = "left";
 public static final String RIGHT = "right";
@@ -54,7 +56,6 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 public static final String BOTTOMLEFT = "bottomLeft";
 public static final String BOTTOMRIGHT = "bottomRight";

-public static final String NAME = "geo_bbox";
 public static final String FIELD = "field";

 @Inject
@@ -126,11 +127,11 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 bottom = sparse.getLat();
 left = sparse.getLon();
 } else {
-throw new ElasticsearchParseException("Unexpected field [" + currentFieldName + "]");
+throw new ElasticsearchParseException("failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
 }
 }
 } else {
-throw new ElasticsearchParseException("fieldname expected but [" + token + "] found");
+throw new ElasticsearchParseException("failed to parse [{}] query. field name expected but [{}] found", NAME, token);
 }
 }
 } else if (token.isValue()) {
@@ -141,7 +142,7 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 } else if ("type".equals(currentFieldName)) {
 type = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[geo_bbox] query does not support [" + currentFieldName + "]");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
 }
 }
 }
@@ -163,10 +164,10 @@ public class GeoBoundingBoxQueryParser implements QueryParser {

 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. could not find [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
 }
 if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. field [{}] is expected to be of type [{}], but is of [{}] type instead", NAME, fieldName, GeoPointFieldMapper.CONTENT_TYPE, fieldType.names().shortName());
 }
 GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);

@@ -177,8 +178,7 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
 filter = new InMemoryGeoBoundingBoxQuery(topLeft, bottomRight, indexFieldData);
 } else {
-throw new QueryParsingException(parseContext, "geo bounding box type [" + type
-+ "] not supported, either 'indexed' or 'memory' are allowed");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. geo bounding box type [{}] is not supported. either [indexed] or [memory] are allowed", NAME, type);
 }

 if (queryName != null) {
@@ -193,7 +193,7 @@ public class GeohashCellQuery {

 XContentParser.Token token;
 if ((token = parser.currentToken()) != Token.START_OBJECT) {
-throw new ElasticsearchParseException(NAME + " must be an object");
+throw new ElasticsearchParseException("failed to parse [{}] query. expected an object but found [{}] instead", NAME, token);
 }

 while ((token = parser.nextToken()) != Token.END_OBJECT) {
@@ -229,27 +229,26 @@ public class GeohashCellQuery {
 }
 }
 } else {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("failed to parse [{}] query. unexpected token [{}]", NAME, token);
 }
 }

 if (geohash == null) {
-throw new QueryParsingException(parseContext, "no geohash value provided to geohash_cell filter");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing geohash value", NAME);
 }

 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
 }

 if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
 }

 GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
 if (!geoFieldType.isGeohashPrefixEnabled()) {
-throw new QueryParsingException(parseContext, "can't execute geohash_cell on field [" + fieldName
-+ "], geohash_prefix is not enabled");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME, fieldName);
 }

 if(levels > 0) {
@@ -147,7 +147,7 @@ public class MultiMatchQueryBuilder extends QueryBuilder implements BoostableQue
 }
 }
 if (type == null) {
-throw new ElasticsearchParseException("No type found for value: " + value);
+throw new ElasticsearchParseException("failed to parse [{}] query type [{}]. unknown type.", MultiMatchQueryParser.NAME, value);
 }
 return type;
 }
@@ -39,12 +39,12 @@ public class QueryParsingException extends IndexException {
 private final int lineNumber;
 private final int columnNumber;

-public QueryParsingException(QueryParseContext parseContext, String msg) {
-this(parseContext, msg, null);
+public QueryParsingException(QueryParseContext parseContext, String msg, Object... args) {
+this(parseContext, msg, null, args);
 }

-public QueryParsingException(QueryParseContext parseContext, String msg, Throwable cause) {
-super(parseContext.index(), msg, cause);
+public QueryParsingException(QueryParseContext parseContext, String msg, Throwable cause, Object... args) {
+super(parseContext.index(), msg, cause, args);
 int lineNumber = UNKNOWN_POSITION;
 int columnNumber = UNKNOWN_POSITION;
 XContentParser parser = parseContext.parser();
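A hypothetical sketch, not taken from this diff, of the same forwarding pattern applied to an imaginary subclass: the Object... args are simply passed through to the parent, which fills the {} placeholders. The class name is made up for illustration.

import org.elasticsearch.ElasticsearchException;

// hypothetical subclass, shown only to illustrate the varargs forwarding pattern
public class ExampleParseFailureException extends ElasticsearchException {

    public ExampleParseFailureException(String msg, Object... args) {
        super(msg, args); // message-only variant
    }

    public ExampleParseFailureException(String msg, Throwable cause, Object... args) {
        super(msg, cause, args); // the cause is passed before the format arguments
    }
}

// usage: throw new ExampleParseFailureException("unexpected token [{}]", token);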
@@ -135,11 +135,11 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
 } else if (MULTI_VALUE_MODE.match(currentFieldName)) {
 multiValueMode = parser.text();
 } else {
-throw new ElasticsearchParseException("Malformed score function score parameters.");
+throw new ElasticsearchParseException("malformed score function score parameters.");
 }
 }
 if (fieldName == null) {
-throw new ElasticsearchParseException("Malformed score function score parameters.");
+throw new ElasticsearchParseException("malformed score function score parameters.");
 }
 XContentParser variableParser = XContentFactory.xContent(variableContent.string()).createParser(variableContent.string());
 scoreFunction = parseVariable(fieldName, variableParser, parseContext, MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT)));
@@ -153,7 +153,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
 // the doc later
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "Unknown field [" + fieldName + "]");
+throw new QueryParsingException(parseContext, "unknown field [{}]", fieldName);
 }

 // dates and time need special handling
@@ -165,8 +165,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
 } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
 return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper.NumberFieldType) fieldType, mode);
 } else {
-throw new QueryParsingException(parseContext, "Field " + fieldName + " is of type " + fieldType
-+ ", but only numeric types are supported.");
+throw new QueryParsingException(parseContext, "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType);
 }
 }

@@ -194,12 +193,11 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
 } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) {
 offset = parser.doubleValue();
 } else {
-throw new ElasticsearchParseException("Parameter " + parameterName + " not supported!");
+throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
 }
 }
 if (!scaleFound || !refFound) {
-throw new ElasticsearchParseException("Both " + DecayFunctionBuilder.SCALE + " and " + DecayFunctionBuilder.ORIGIN
-+ " must be set for numeric fields.");
+throw new ElasticsearchParseException("both [{}] and [{}] must be set for numeric fields.", DecayFunctionBuilder.SCALE, DecayFunctionBuilder.ORIGIN);
 }
 IndexNumericFieldData numericFieldData = parseContext.getForField(fieldType);
 return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
@@ -225,11 +223,11 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
 } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) {
 offsetString = parser.text();
 } else {
-throw new ElasticsearchParseException("Parameter " + parameterName + " not supported!");
+throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
 }
 }
 if (origin == null || scaleString == null) {
-throw new ElasticsearchParseException(DecayFunctionBuilder.ORIGIN + " and " + DecayFunctionBuilder.SCALE + " must be set for geo fields.");
+throw new ElasticsearchParseException("[{}] and [{}] must be set for geo fields.", DecayFunctionBuilder.ORIGIN, DecayFunctionBuilder.SCALE);
 }
 double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
 double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
@@ -258,7 +256,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
 } else if (parameterName.equals(DecayFunctionBuilder.OFFSET)) {
 offsetString = parser.text();
 } else {
-throw new ElasticsearchParseException("Parameter " + parameterName + " not supported!");
+throw new ElasticsearchParseException("parameter [{}] not supported!", parameterName);
 }
 }
 long origin = SearchContext.current().nowInMillis();
@@ -267,7 +265,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
 }

 if (scaleString == null) {
-throw new ElasticsearchParseException(DecayFunctionBuilder.SCALE + " must be set for date fields.");
+throw new ElasticsearchParseException("[{}] must be set for date fields.", DecayFunctionBuilder.SCALE);
 }
 TimeValue val = TimeValue.parseTimeValue(scaleString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale");
 double scale = val.getMillis();
@@ -52,14 +52,16 @@ import java.util.Arrays;
 public class FunctionScoreQueryParser implements QueryParser {

 public static final String NAME = "function_score";
-ScoreFunctionParserMapper functionParserMapper;
 // For better readability of error message
-static final String MISPLACED_FUNCTION_MESSAGE_PREFIX = "You can either define \"functions\":[...] or a single function, not both. ";
-static final String MISPLACED_BOOST_FUNCTION_MESSAGE_SUFFIX = " Did you mean \"boost\" instead?";
+static final String MISPLACED_FUNCTION_MESSAGE_PREFIX = "you can either define [functions] array or a single function, not both. ";
+static final String MISPLACED_BOOST_FUNCTION_MESSAGE_SUFFIX = " did you mean [boost] instead?";

 public static final ParseField WEIGHT_FIELD = new ParseField("weight");
 private static final ParseField FILTER_FIELD = new ParseField("filter").withAllDeprecated("query");

+ScoreFunctionParserMapper functionParserMapper;
+
 @Inject
 public FunctionScoreQueryParser(ScoreFunctionParserMapper functionParserMapper) {
 this.functionParserMapper = functionParserMapper;
@@ -121,7 +123,7 @@ public class FunctionScoreQueryParser implements QueryParser {
 minScore = parser.floatValue();
 } else if ("functions".equals(currentFieldName)) {
 if (singleFunctionFound) {
-String errorString = "Found \"" + singleFunctionName + "\" already, now encountering \"functions\": [...].";
+String errorString = "already found [" + singleFunctionName + "], now encountering [functions].";
 handleMisplacedFunctionsDeclaration(errorString, singleFunctionName);
 }
 currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctions, currentFieldName);
@@ -138,12 +140,11 @@ public class FunctionScoreQueryParser implements QueryParser {
 scoreFunction = functionParserMapper.get(parseContext, currentFieldName).parse(parseContext, parser);
 }
 if (functionArrayFound) {
-String errorString = "Found \"functions\": [...] already, now encountering \"" + currentFieldName + "\".";
+String errorString = "already found [functions] array, now encountering [" + currentFieldName + "].";
 handleMisplacedFunctionsDeclaration(errorString, currentFieldName);
 }
 if (filterFunctions.size() > 0) {
-String errorString = "Found function " + singleFunctionName + " already, now encountering \"" + currentFieldName + "\". Use functions[{...},...] if you want to define several functions.";
-throw new ElasticsearchParseException(errorString);
+throw new ElasticsearchParseException("failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", NAME, singleFunctionName, currentFieldName);
 }
 filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(null, scoreFunction));
 singleFunctionFound = true;
@@ -195,7 +196,7 @@ public class FunctionScoreQueryParser implements QueryParser {
 if (Arrays.asList(FactorParser.NAMES).contains(functionName)) {
 errorString = errorString + MISPLACED_BOOST_FUNCTION_MESSAGE_SUFFIX;
 }
-throw new ElasticsearchParseException(errorString);
+throw new ElasticsearchParseException("failed to parse [{}] query. [{}]", NAME, errorString);
 }

 private String parseFiltersAndFunctions(QueryParseContext parseContext, XContentParser parser,
@@ -206,8 +207,7 @@ public class FunctionScoreQueryParser implements QueryParser {
 ScoreFunction scoreFunction = null;
 Float functionWeight = null;
 if (token != XContentParser.Token.START_OBJECT) {
-throw new QueryParsingException(parseContext, NAME + ": malformed query, expected a " + XContentParser.Token.START_OBJECT
-+ " while parsing functions but got a " + token);
+throw new QueryParsingException(parseContext, "failed to parse [{}]. malformed query, expected a [{}] while parsing functions but got a [{}] instead", XContentParser.Token.START_OBJECT, token, NAME);
 } else {
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token == XContentParser.Token.FIELD_NAME) {
@@ -234,7 +234,7 @@ public class FunctionScoreQueryParser implements QueryParser {
 filter = Queries.newMatchAllQuery();
 }
 if (scoreFunction == null) {
-throw new ElasticsearchParseException("function_score: One entry in functions list is missing a function.");
+throw new ElasticsearchParseException("failed to parse [{}] query. an entry in functions list is missing a function.", NAME);
 }
 filterFunctions.add(new FiltersFunctionScoreQuery.FilterFunction(filter, scoreFunction));

@@ -257,7 +257,7 @@ public class FunctionScoreQueryParser implements QueryParser {
 } else if ("first".equals(scoreMode)) {
 return FiltersFunctionScoreQuery.ScoreMode.First;
 } else {
-throw new QueryParsingException(parseContext, NAME + " illegal score_mode [" + scoreMode + "]");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. illegal score_mode [{}]", NAME, scoreMode);
 }
 }

@@ -265,7 +265,7 @@ public class FunctionScoreQueryParser implements QueryParser {
 String boostMode = parser.text();
 CombineFunction cf = combineFunctionsMap.get(boostMode);
 if (cf == null) {
-throw new QueryParsingException(parseContext, NAME + " illegal boost_mode [" + boostMode + "]");
+throw new QueryParsingException(parseContext, "failed to parse [{}] query. illegal boost_mode [{}]", NAME, boostMode);
 }
 return cf;
 }
@@ -28,13 +28,13 @@ import java.io.IOException;

 public interface ScoreFunctionParser {

-public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException;
+ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException;

 /**
 * Returns the name of the function, for example "linear", "gauss" etc. This
 * name is used for registering the parser in
 * {@link FunctionScoreQueryParser}.
 * */
-public String[] getNames();
+String[] getNames();

 }
@@ -287,13 +287,13 @@ public class BlobStoreIndexShardSnapshot {
 metaHash.offset = 0;
 metaHash.length = metaHash.bytes.length;
 } else {
-throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
 }
 } else {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 }
 } else {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]",token);
 }
 }
 }
@@ -480,7 +480,7 @@ public class BlobStoreIndexShardSnapshot {
 } else if (ParseFields.TOTAL_SIZE.match(currentFieldName)) {
 totalSize = parser.longValue();
 } else {
-throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
 }
 } else if (token == XContentParser.Token.START_ARRAY) {
 if (ParseFields.FILES.match(currentFieldName)) {
@@ -488,13 +488,13 @@ public class BlobStoreIndexShardSnapshot {
 indexFiles.add(FileInfo.fromXContent(parser));
 }
 } else {
-throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
 }
 } else {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 }
 } else {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 }
 }
 }
@@ -231,13 +231,13 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
 if (token == XContentParser.Token.START_OBJECT) {
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token != XContentParser.Token.FIELD_NAME) {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 }
 String currentFieldName = parser.currentName();
 token = parser.nextToken();
 if (token == XContentParser.Token.START_ARRAY) {
 if (ParseFields.FILES.match(currentFieldName) == false) {
-throw new ElasticsearchParseException("unknown array [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
 }
 while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
 FileInfo fileInfo = FileInfo.fromXContent(parser);
@@ -245,22 +245,22 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
 }
 } else if (token == XContentParser.Token.START_OBJECT) {
 if (ParseFields.SNAPSHOTS.match(currentFieldName) == false) {
-throw new ElasticsearchParseException("unknown object [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
 }
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token != XContentParser.Token.FIELD_NAME) {
-throw new ElasticsearchParseException("unknown object [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
 }
 String snapshot = parser.currentName();
 if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
-throw new ElasticsearchParseException("unknown object [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
 }
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
 if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
 if (ParseFields.FILES.match(currentFieldName) == false) {
-throw new ElasticsearchParseException("unknown array [" + currentFieldName + "]");
+throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
 }
 List<String> fileNames = newArrayList();
 while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
@@ -272,7 +272,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
 }
 }
 } else {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 }
 }
 }
@@ -325,7 +325,7 @@ public class PercolatorService extends AbstractComponent {
 if ("size".equals(currentFieldName)) {
 context.size(parser.intValue());
 if (context.size() < 0) {
-throw new ElasticsearchParseException("size is set to [" + context.size() + "] and is expected to be higher or equal to 0");
+throw new ElasticsearchParseException("size is set to [{}] and is expected to be higher or equal to 0", context.size());
 }
 } else if ("sort".equals(currentFieldName)) {
 parseSort(parser, context);
@@ -471,7 +471,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
 }
 }
 if (!ignoreIndexErrors) {
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 } else {
 logger.warn("[{}] [{}] unexpected token while reading snapshot metadata [{}]", snapshotId, index, token);
 }
@@ -524,7 +524,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
 }
 }
 }
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 } catch (JsonParseException ex) {
 throw new ElasticsearchParseException("failed to read snapshot", ex);
 }
@@ -549,7 +549,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
 }
 }
 }
-throw new ElasticsearchParseException("unexpected token [" + token + "]");
+throw new ElasticsearchParseException("unexpected token [{}]", token);
 }
 }

@@ -74,7 +74,7 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler {
 String currentFieldName = null;
 XContentParser.Token token = parser.nextToken();
 if (token != XContentParser.Token.START_OBJECT) {
-throw new ElasticsearchParseException("request body must start with [" + XContentParser.Token.START_OBJECT + "] but found [" + token + "]");
+throw new ElasticsearchParseException("failed to parse request. request body must be an object but found [{}] instead", token);
 }
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token == XContentParser.Token.FIELD_NAME) {
@@ -83,10 +83,10 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler {
 if (token == XContentParser.Token.START_OBJECT) {
 params = parser.map();
 } else {
-throw new ElasticsearchParseException("Expected [" + XContentParser.Token.START_OBJECT + "] for [params] but found [" + token + "]");
+throw new ElasticsearchParseException("failed to parse request. field [{}] is expected to be an object, but found [{}] instead", currentFieldName, token);
 }
 } else {
-throw new ElasticsearchParseException("Unknown field [" + currentFieldName + "] of type [" + token + "]");
+throw new ElasticsearchParseException("failed to parse request. unknown field [{}] of type [{}]", currentFieldName, token);
 }
 }
 template = new Template(templateId, ScriptType.INDEXED, MustacheScriptEngineService.NAME, null, params);
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.ObjectSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.collect.ImmutableMap;
-
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NumericDocValues;
@@ -50,6 +49,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
 import org.elasticsearch.common.util.concurrent.FutureUtils;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentLocation;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
@@ -83,23 +83,10 @@ import org.elasticsearch.script.mustache.MustacheScriptEngineService;
 import org.elasticsearch.search.dfs.CachedDfSource;
 import org.elasticsearch.search.dfs.DfsPhase;
 import org.elasticsearch.search.dfs.DfsSearchResult;
-import org.elasticsearch.search.fetch.FetchPhase;
-import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.fetch.QueryFetchSearchResult;
-import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
-import org.elasticsearch.search.fetch.ShardFetchRequest;
-import org.elasticsearch.search.internal.DefaultSearchContext;
-import org.elasticsearch.search.internal.InternalScrollSearchRequest;
-import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.fetch.*;
+import org.elasticsearch.search.internal.*;
 import org.elasticsearch.search.internal.SearchContext.Lifetime;
-import org.elasticsearch.search.internal.ShardSearchLocalRequest;
-import org.elasticsearch.search.internal.ShardSearchRequest;
-import org.elasticsearch.search.query.QueryPhase;
-import org.elasticsearch.search.query.QueryPhaseExecutionException;
-import org.elasticsearch.search.query.QuerySearchRequest;
-import org.elasticsearch.search.query.QuerySearchResult;
-import org.elasticsearch.search.query.QuerySearchResultProvider;
-import org.elasticsearch.search.query.ScrollQuerySearchResult;
+import org.elasticsearch.search.query.*;
 import org.elasticsearch.search.warmer.IndexWarmersMetaData;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -767,7 +754,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
         XContentParser.Token token;
         token = parser.nextToken();
         if (token != XContentParser.Token.START_OBJECT) {
-            throw new ElasticsearchParseException("Expected START_OBJECT but got " + token.name() + " " + parser.currentName());
+            throw new ElasticsearchParseException("failed to parse search source. source must be an object, but found [{}] instead", token.name());
         }
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
@@ -775,14 +762,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
                 parser.nextToken();
                 SearchParseElement element = elementParsers.get(fieldName);
                 if (element == null) {
-                    throw new SearchParseException(context, "No parser for element [" + fieldName + "]", parser.getTokenLocation());
+                    throw new SearchParseException(context, "failed to parse search source. unknown search element [" + fieldName + "]", parser.getTokenLocation());
                 }
                 element.parse(parser, context);
             } else {
                 if (token == null) {
-                    throw new ElasticsearchParseException("End of query source reached but query is not complete.");
+                    throw new ElasticsearchParseException("failed to parse search source. end of query source reached but query is not complete.");
                 } else {
-                    throw new ElasticsearchParseException("Expected field name but got " + token.name() + " \"" + parser.currentName() + "\"");
+                    throw new ElasticsearchParseException("failed to parse search source. expected field name but got [{}]", token);
                 }
             }
         }
@@ -793,7 +780,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
             } catch (Throwable e1) {
                 // ignore
             }
-            throw new SearchParseException(context, "Failed to parse source [" + sSource + "]", parser.getTokenLocation(), e);
+            XContentLocation location = parser != null ? parser.getTokenLocation() : null;
+            throw new SearchParseException(context, "failed to parse search source [" + sSource + "]", location, e);
         } finally {
             if (parser != null) {
                 parser.close();
@@ -145,7 +145,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
                     return mode;
                 }
             }
-            throw new ElasticsearchParseException("No " + KEY.getPreferredName() + " found for value [" + value + "]");
+            throw new ElasticsearchParseException("no [{}] found for value [{}]", KEY.getPreferredName(), value);
         }
     }
 }
@@ -123,7 +123,7 @@ public class GND extends NXYSignificanceHeuristic {
                 parser.nextToken();
                 backgroundIsSuperset = parser.booleanValue();
             } else {
-                throw new ElasticsearchParseException("Field " + parser.currentName().toString() + " unknown for " + givenName);
+                throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]", givenName, parser.currentName());
             }
             token = parser.nextToken();
         }
@@ -110,7 +110,7 @@ public class JLHScore extends SignificanceHeuristic {
         public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
             // move to the closing bracket
             if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
-                throw new ElasticsearchParseException("expected }, got " + parser.currentName() + " instead in jhl score");
+                throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken());
             }
             return new JLHScore();
         }
@@ -150,7 +150,7 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
                 parser.nextToken();
                 backgroundIsSuperset = parser.booleanValue();
             } else {
-                throw new ElasticsearchParseException("Field " + parser.currentName().toString() + " unknown for " + givenName);
+                throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]", givenName, parser.currentName());
             }
             token = parser.nextToken();
         }
@@ -79,7 +79,7 @@ public class PercentageScore extends SignificanceHeuristic {
         public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
             // move to the closing bracket
             if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
-                throw new ElasticsearchParseException("expected }, got " + parser.currentName() + " instead in percentage score");
+                throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());
             }
             return new PercentageScore();
         }
@@ -30,14 +30,9 @@ import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
-import org.elasticsearch.script.ExecutableScript;
-import org.elasticsearch.script.Script;
+import org.elasticsearch.script.*;
 import org.elasticsearch.script.Script.ScriptField;
-import org.elasticsearch.script.ScriptContext;
-import org.elasticsearch.script.ScriptParameterParser;
 import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.script.ScriptService.ScriptType;
 import org.elasticsearch.search.aggregations.InternalAggregation;

 import java.io.IOException;
@@ -134,7 +129,8 @@ public class ScriptHeuristic extends SignificanceHeuristic {

     @Override
     public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
-        NAMES_FIELD.match(parser.currentName(), ParseField.EMPTY_FLAGS);
+        String heuristicName = parser.currentName();
+        NAMES_FIELD.match(heuristicName, ParseField.EMPTY_FLAGS);
         Script script = null;
         XContentParser.Token token;
         Map<String, Object> params = null;
@@ -149,10 +145,10 @@ public class ScriptHeuristic extends SignificanceHeuristic {
                 } else if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs)
                     params = parser.map();
                 } else {
-                    throw new ElasticsearchParseException("unknown object " + currentFieldName + " in script_heuristic");
+                    throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
                 }
             } else if (!scriptParameterParser.token(currentFieldName, token, parser)) {
-                throw new ElasticsearchParseException("unknown field " + currentFieldName + " in script_heuristic");
+                throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]", heuristicName, currentFieldName);
             }
         }

@@ -165,17 +161,17 @@ public class ScriptHeuristic extends SignificanceHeuristic {
                 script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
             }
         } else if (params != null) {
-            throw new ElasticsearchParseException("script params must be specified inside script object");
+            throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. script params must be specified inside script object", heuristicName);
         }

         if (script == null) {
-            throw new ElasticsearchParseException("No script found in script_heuristic");
+            throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. no script found in script_heuristic", heuristicName);
         }
         ExecutableScript searchScript;
         try {
             searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS);
         } catch (Exception e) {
-            throw new ElasticsearchParseException("The script [" + script + "] could not be loaded", e);
+            throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. the script [{}] could not be loaded", e, script, heuristicName);
         }
         return new ScriptHeuristic(searchScript, script);
     }
@@ -20,17 +20,14 @@

 package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
-import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
-import java.util.EnumSet;

 public interface SignificanceHeuristicParser {

-    public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException;
+    SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException;

-    public String[] getNames();
+    String[] getNames();
 }
@@ -26,10 +26,8 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
-import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
 import java.text.ParseException;
@@ -59,12 +57,8 @@ public class HoltWintersModel extends MovAvgModel {
             SeasonalityType result = null;
             for (SeasonalityType policy : values()) {
                 if (policy.parseField.match(text)) {
-                    if (result == null) {
-                        result = policy;
-                    } else {
-                        throw new IllegalStateException("Text can be parsed to 2 different seasonality types: text=[" + text
-                            + "], " + "policies=" + Arrays.asList(result, policy));
-                    }
+                    result = policy;
+                    break;
                 }
             }
             if (result == null) {
@@ -72,7 +66,7 @@ public class HoltWintersModel extends MovAvgModel {
                 for (SeasonalityType policy : values()) {
                     validNames.add(policy.getName());
                 }
-                throw new ElasticsearchParseException("Invalid seasonality type: [" + text + "], accepted values: " + validNames);
+                throw new ElasticsearchParseException("failed to parse seasonality type [{}]. accepted values are [{}]", text, validNames);
             }
             return result;
         }
@@ -74,7 +74,7 @@ public class FetchSourceParseElement implements SearchParseElement {
                 } else if ("excludes".equals(currentFieldName) || "exclude".equals(currentFieldName)) {
                     currentList = excludes != null ? excludes : (excludes = new ArrayList<>(2));
                 } else {
-                    throw new ElasticsearchParseException("Source definition may not contain " + parser.text());
+                    throw new ElasticsearchParseException("source definition may not contain [{}]", parser.text());
                 }
             } else if (token == XContentParser.Token.START_ARRAY) {
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@@ -87,7 +87,7 @@ public class FetchSourceParseElement implements SearchParseElement {
                 }
             }
         } else {
-            throw new ElasticsearchParseException("source element value can be of type " + token.name());
+            throw new ElasticsearchParseException("source element value can be of type [{}]", token.name());
         }

         return new FetchSourceContext(
@@ -151,7 +151,7 @@ public class LeafFieldsLookup implements Map {
                 fieldVisitor.postProcess(data.fieldType());
                 data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.fieldType().names().indexName())));
             } catch (IOException e) {
-                throw new ElasticsearchParseException("failed to load field [" + name + "]", e);
+                throw new ElasticsearchParseException("failed to load field [{}]", e, name);
             }
         }
         return data;
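One detail worth noting in the call above: when a cause is attached, it is passed right after the message template and before the values that fill the placeholders ("failed to load field [{}]", e, name). Below is a minimal, self-contained sketch of that calling convention; ParseExceptionSketch is a hypothetical stand-in rather than the real exception class, and only the (template, cause, args...) shape mirrors the diff.

// ParseExceptionSketch.java -- hypothetical stand-in, for illustration only
import java.io.IOException;

public class ParseExceptionSketch extends RuntimeException {

    // Mirrors the (template, cause, args...) shape used by the new call sites.
    public ParseExceptionSketch(String template, Throwable cause, Object... args) {
        super(fill(template, args), cause);
    }

    // Naive placeholder substitution; fine for illustration, not production.
    private static String fill(String template, Object... args) {
        StringBuilder out = new StringBuilder(template);
        for (Object arg : args) {
            int at = out.indexOf("{}");
            if (at < 0) {
                break;
            }
            out.replace(at, at + 2, String.valueOf(arg));
        }
        return out.toString();
    }

    public static void main(String[] args) {
        RuntimeException e = new ParseExceptionSketch(
                "failed to load field [{}]", new IOException("disk error"), "title");
        // prints: failed to load field [title]
        System.out.println(e.getMessage());
    }
}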
@@ -218,7 +218,7 @@ public class GeoDistanceSortParser implements SortParser {
                 double lon = parser.doubleValue();
                 parser.nextToken();
                 if (!parser.currentToken().equals(XContentParser.Token.VALUE_NUMBER)) {
-                    throw new ElasticsearchParseException("geo point parsing: expected second number but got" + parser.currentToken());
+                    throw new ElasticsearchParseException("geo point parsing: expected second number but got [{}] instead", parser.currentToken());
                 }
                 double lat = parser.doubleValue();
                 GeoPoint point = new GeoPoint();
@@ -115,7 +115,7 @@ public abstract class ContextBuilder<E extends ContextMapping> {
         final Object argType = config.get(ContextMapping.FIELD_TYPE);

         if (argType == null) {
-            throw new ElasticsearchParseException("missing [" + ContextMapping.FIELD_TYPE + "] in context mapping");
+            throw new ElasticsearchParseException("missing [{}] in context mapping", ContextMapping.FIELD_TYPE);
         }

         final String type = argType.toString();
@@ -125,7 +125,7 @@ public abstract class ContextBuilder<E extends ContextMapping> {
         } else if (CategoryContextMapping.TYPE.equals(type)) {
             contextMapping = CategoryContextMapping.load(name, config);
         } else {
-            throw new ElasticsearchParseException("unknown context type[" + type + "]");
+            throw new ElasticsearchParseException("unknown context type [{}]", type);
         }
         config.remove(ContextMapping.FIELD_TYPE);
         DocumentMapperParser.checkNoRemainingFields(name, config, indexVersionCreated);
@@ -293,7 +293,7 @@ public abstract class ContextMapping implements ToXContent {
                 String name = parser.text();
                 ContextMapping mapping = mappings.get(name);
                 if (mapping == null) {
-                    throw new ElasticsearchParseException("no mapping defined for [" + name + "]");
+                    throw new ElasticsearchParseException("no mapping defined for [{}]", name);
                 }
                 parser.nextToken();
                 querySet.put(name, mapping.parseQuery(name, parser));
@@ -174,14 +174,14 @@ public class GeolocationContextMapping extends ContextMapping {
             } else if (def instanceof Map) {
                 Map<String, Object> latlonMap = (Map<String, Object>) def;
                 if (!latlonMap.containsKey("lat") || !(latlonMap.get("lat") instanceof Double)) {
-                    throw new ElasticsearchParseException("field [" + FIELD_MISSING + "] map must have field lat and a valid latitude");
+                    throw new ElasticsearchParseException("field [{}] map must have field lat and a valid latitude", FIELD_MISSING);
                 }
                 if (!latlonMap.containsKey("lon") || !(latlonMap.get("lon") instanceof Double)) {
-                    throw new ElasticsearchParseException("field [" + FIELD_MISSING + "] map must have field lon and a valid longitude");
+                    throw new ElasticsearchParseException("field [{}] map must have field lon and a valid longitude", FIELD_MISSING);
                 }
                 builder.addDefaultLocation(Double.valueOf(latlonMap.get("lat").toString()), Double.valueOf(latlonMap.get("lon").toString()));
             } else {
-                throw new ElasticsearchParseException("field [" + FIELD_MISSING + "] must be of type string or list");
+                throw new ElasticsearchParseException("field [{}] must be of type string or list", FIELD_MISSING);
             }
             config.remove(FIELD_MISSING);
         }
@@ -350,7 +350,7 @@ public class GeolocationContextMapping extends ContextMapping {
                         throw new ElasticsearchParseException("latitude must be a number");
                     }
                 } else {
-                    throw new ElasticsearchParseException("only lat/lon or [" + FIELD_VALUE + "] is allowed");
+                    throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE);
                 }
             } else if ("lon".equals(fieldName)) {
                 if(point == null) {
@@ -364,7 +364,7 @@ public class GeolocationContextMapping extends ContextMapping {
                         throw new ElasticsearchParseException("longitude must be a number");
                     }
                 } else {
-                    throw new ElasticsearchParseException("only lat/lon or [" + FIELD_VALUE + "] is allowed");
+                    throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE);
                 }
             } else if (FIELD_PRECISION.equals(fieldName)) {
                 if(parser.nextToken() == Token.START_ARRAY) {
@@ -381,10 +381,10 @@ public class GeolocationContextMapping extends ContextMapping {
                     parser.nextToken();
                     point = GeoUtils.parseGeoPoint(parser);
                 } else {
-                    throw new ElasticsearchParseException("only lat/lon or [" + FIELD_VALUE + "] is allowed");
+                    throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE);
                 }
             } else {
-                throw new ElasticsearchParseException("unexpected fieldname [" + fieldName + "]");
+                throw new ElasticsearchParseException("unexpected fieldname [{}]", fieldName);
             }
         }
@@ -201,15 +201,15 @@ public class SnapshotShardFailure implements ShardOperationFailedException {
                     } else if ("status".equals(currentFieldName)) {
                         snapshotShardFailure.status = RestStatus.valueOf(parser.text());
                     } else {
-                        throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]");
+                        throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
                     }
                 }
             } else {
-                throw new ElasticsearchParseException("unexpected token [" + token + "]");
+                throw new ElasticsearchParseException("unexpected token [{}]", token);
             }
         }
     } else {
-        throw new ElasticsearchParseException("unexpected token [" + token + "]");
+        throw new ElasticsearchParseException("unexpected token [{}]", token);
     }
     return snapshotShardFailure;
 }
@@ -2297,7 +2297,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
             queryParser.parse(query).query();
             fail("FunctionScoreQueryParser should throw an exception here because two functions in body are not allowed.");
         } catch (QueryParsingException e) {
-            assertThat(e.getDetailedMessage(), containsString("Use functions[{...},...] if you want to define several functions."));
+            assertThat(e.getDetailedMessage(), containsString("use [functions] array if you want to define several functions."));
         }
     }

@@ -2353,7 +2353,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
             queryParser.parse(query).query();
             fail("Expect exception here because array of functions and one weight in body is not allowed.");
         } catch (QueryParsingException e) {
-            assertThat(e.getDetailedMessage(), containsString("You can either define \"functions\":[...] or a single function, not both. Found \"functions\": [...] already, now encountering \"weight\"."));
+            assertThat(e.getDetailedMessage(), containsString("you can either define [functions] array or a single function, not both. already found [functions] array, now encountering [weight]."));
         }
         query = jsonBuilder().startObject().startObject("function_score")
                 .field("weight", 2)
@@ -2365,7 +2365,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
             queryParser.parse(query).query();
             fail("Expect exception here because array of functions and one weight in body is not allowed.");
         } catch (QueryParsingException e) {
-            assertThat(e.getDetailedMessage(), containsString("You can either define \"functions\":[...] or a single function, not both. Found \"weight\" already, now encountering \"functions\": [...]."));
+            assertThat(e.getDetailedMessage(), containsString("you can either define [functions] array or a single function, not both. already found [weight], now encountering [functions]."));
         }
     }
@@ -168,19 +168,19 @@ public class SignificanceHeuristicTests extends ElasticsearchTestCase {

         // test exceptions
         String faultyHeuristicdefinition = "\"mutual_information\":{\"include_negatives\": false, \"some_unknown_field\": false}";
-        String expectedError = "unknown for mutual_information";
+        String expectedError = "unknown field [some_unknown_field]";
         checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

         faultyHeuristicdefinition = "\"chi_square\":{\"unknown_field\": true}";
-        expectedError = "unknown for chi_square";
+        expectedError = "unknown field [unknown_field]";
         checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

         faultyHeuristicdefinition = "\"jlh\":{\"unknown_field\": true}";
-        expectedError = "expected }, got ";
+        expectedError = "expected an empty object, but found ";
         checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);

         faultyHeuristicdefinition = "\"gnd\":{\"unknown_field\": true}";
-        expectedError = "unknown for gnd";
+        expectedError = "unknown field [unknown_field]";
         checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);
     }
@@ -46,23 +46,11 @@ import java.util.concurrent.ExecutionException;
 import static org.elasticsearch.client.Requests.indexRequest;
 import static org.elasticsearch.client.Requests.searchRequest;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.termQuery;
-import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
-import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.gaussDecayFunction;
-import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.linearDecayFunction;
+import static org.elasticsearch.index.query.QueryBuilders.*;
+import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.*;
 import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
-import static org.hamcrest.Matchers.anyOf;
-import static org.hamcrest.Matchers.closeTo;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.isOneOf;
-import static org.hamcrest.Matchers.lessThan;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
+import static org.hamcrest.Matchers.*;

 public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
@@ -816,7 +804,7 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
             fail("Search should result in SearchPhaseExecutionException");
         } catch (SearchPhaseExecutionException e) {
             logger.info(e.shardFailures()[0].reason());
-            assertTrue(e.shardFailures()[0].reason().contains("Found \"functions\": [...] already, now encountering \"boost_factor\". Did you mean \"boost\" instead?"));
+            assertThat(e.shardFailures()[0].reason(), containsString("already found [functions] array, now encountering [boost_factor]. did you mean [boost] instead?"));
         }

         query = XContentFactory.jsonBuilder();
@@ -829,7 +817,7 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
             fail("Search should result in SearchPhaseExecutionException");
         } catch (SearchPhaseExecutionException e) {
             logger.info(e.shardFailures()[0].reason());
-            assertTrue(e.shardFailures()[0].reason().contains("Found \"boost_factor\" already, now encountering \"functions\": [...]. Did you mean \"boost\" instead?"));
+            assertThat(e.shardFailures()[0].reason(), containsString("already found [boost_factor], now encountering [functions]. did you mean [boost] instead?"));
         }

         query = XContentFactory.jsonBuilder();
@@ -842,8 +830,8 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
             fail("Search should result in SearchPhaseExecutionException");
         } catch (SearchPhaseExecutionException e) {
             logger.info(e.shardFailures()[0].reason());
-            assertTrue(e.shardFailures()[0].reason().contains("Found \"random_score\" already, now encountering \"functions\": [...]."));
-            assertFalse(e.shardFailures()[0].reason().contains("Did you mean \"boost\" instead?"));
+            assertThat(e.shardFailures()[0].reason(), containsString("already found [random_score], now encountering [functions]"));
+            assertThat(e.shardFailures()[0].reason(), not(containsString("did you mean [boost] instead?")));

         }
     }
@@ -882,8 +870,8 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
                     searchSource().query(query))).actionGet();
             fail("Should fail with SearchPhaseExecutionException");
         } catch (SearchPhaseExecutionException failure) {
-            assertTrue(failure.toString().contains("SearchParseException"));
-            assertFalse(failure.toString().contains("NullPointerException"));
+            assertThat(failure.toString(), containsString("SearchParseException"));
+            assertThat(failure.toString(), not(containsString("NullPointerException")));
         }

         query = "{\n" +
@@ -916,26 +904,26 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
                     searchSource().query(query))).actionGet();
             fail("Should fail with SearchPhaseExecutionException");
         } catch (SearchPhaseExecutionException failure) {
-            assertTrue(failure.toString().contains("SearchParseException"));
-            assertFalse(failure.toString().contains("NullPointerException"));
-            assertTrue(failure.toString().contains("One entry in functions list is missing a function"));
+            assertThat(failure.toString(), containsString("SearchParseException"));
+            assertThat(failure.toString(), not(containsString("NullPointerException")));
+            assertThat(failure.toString(), containsString("an entry in functions list is missing a function"));
         }

         // next test java client
         try {
             client().prepareSearch("t").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.matchAllQuery(), null)).get();
         } catch (IllegalArgumentException failure) {
-            assertTrue(failure.toString().contains("function must not be null"));
+            assertThat(failure.toString(), containsString("function must not be null"));
         }
         try {
             client().prepareSearch("t").setQuery(QueryBuilders.functionScoreQuery().add(QueryBuilders.matchAllQuery(), null)).get();
         } catch (IllegalArgumentException failure) {
-            assertTrue(failure.toString().contains("function must not be null"));
+            assertThat(failure.toString(), containsString("function must not be null"));
         }
         try {
             client().prepareSearch("t").setQuery(QueryBuilders.functionScoreQuery().add(null)).get();
         } catch (IllegalArgumentException failure) {
-            assertTrue(failure.toString().contains("function must not be null"));
+            assertThat(failure.toString(), containsString("function must not be null"));
         }
     }
@@ -960,7 +960,7 @@ public class DedicatedClusterSnapshotRestoreTests extends AbstractSnapshotTests
                     }
                     data = parser.text();
                 } else {
-                    throw new ElasticsearchParseException("failed to parse snapshottable metadata, unknown field [" + currentFieldName + "]");
+                    throw new ElasticsearchParseException("failed to parse snapshottable metadata, unknown field [{}]", currentFieldName);
                 }
             } else {
                 throw new ElasticsearchParseException("failed to parse snapshottable metadata");