Standardize underscore requirements in parameters (#27414)
Standardize underscore requirements in parameters across different types of requests: _index, _type, _source, and _id keep their underscores, while parameters such as version and retry_on_conflict are now accepted only without underscores. An error is thrown if the older variants of these parameters are used. BulkRequest, MultiGetRequest, TermVectorsRequest, and MoreLikeThisQueryBuilder were changed. Closes #26886
Parent: a5df2ef538
Commit: 858b2c7cb8
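For illustration, the rule this commit enforces can be sketched in a few lines of plain Java (a standalone, hypothetical example, not Elasticsearch source; the class and method names are made up): metadata fields such as _index, _type, _id and _source keep their leading underscore, everything else (routing, parent, version, version_type, retry_on_conflict, ...) is accepted only without one, and any legacy spelling now fails instead of being silently accepted.

// Hypothetical sketch of the strictness this commit introduces (not Elasticsearch source).
import java.util.Map;
import java.util.Set;

public class StrictBulkParamsSketch {

    // Parameters that keep their leading underscore.
    private static final Set<String> UNDERSCORE_PARAMS = Set.of("_index", "_type", "_id", "_source");

    // Parameters that are now accepted only without a leading underscore.
    private static final Set<String> PLAIN_PARAMS = Set.of(
            "routing", "parent", "op_type", "version", "version_type", "retry_on_conflict", "pipeline");

    // Throws for any spelling outside the two accepted sets, e.g. "_routing", "_version" or "versionType".
    static void validate(Map<String, Object> actionMetadata) {
        for (String name : actionMetadata.keySet()) {
            if (UNDERSCORE_PARAMS.contains(name) == false && PLAIN_PARAMS.contains(name) == false) {
                throw new IllegalArgumentException("unknown parameter [" + name + "]");
            }
        }
    }

    public static void main(String[] args) {
        // Accepted: underscores only where they are still required.
        validate(Map.of("_index", "index1", "_id", "1", "retry_on_conflict", 3));
        // Rejected: the pre-7.0 spelling with a leading underscore.
        try {
            validate(Map.of("_index", "index1", "_id", "1", "_retry_on_conflict", 3));
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}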
@@ -191,23 +191,23 @@ public final class Request {
 metadata.field("_id", request.id());
 }
 if (Strings.hasLength(request.routing())) {
-metadata.field("_routing", request.routing());
+metadata.field("routing", request.routing());
 }
 if (Strings.hasLength(request.parent())) {
-metadata.field("_parent", request.parent());
+metadata.field("parent", request.parent());
 }
 if (request.version() != Versions.MATCH_ANY) {
-metadata.field("_version", request.version());
+metadata.field("version", request.version());
 }

 VersionType versionType = request.versionType();
 if (versionType != VersionType.INTERNAL) {
 if (versionType == VersionType.EXTERNAL) {
-metadata.field("_version_type", "external");
+metadata.field("version_type", "external");
 } else if (versionType == VersionType.EXTERNAL_GTE) {
-metadata.field("_version_type", "external_gte");
+metadata.field("version_type", "external_gte");
 } else if (versionType == VersionType.FORCE) {
-metadata.field("_version_type", "force");
+metadata.field("version_type", "force");
 }
 }

@@ -219,7 +219,7 @@ public final class Request {
 } else if (opType == DocWriteRequest.OpType.UPDATE) {
 UpdateRequest updateRequest = (UpdateRequest) request;
 if (updateRequest.retryOnConflict() > 0) {
-metadata.field("_retry_on_conflict", updateRequest.retryOnConflict());
+metadata.field("retry_on_conflict", updateRequest.retryOnConflict());
 }
 if (updateRequest.fetchSource() != null) {
 metadata.field("_source", updateRequest.fetchSource());
@@ -30,6 +30,7 @@ import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;

@@ -68,6 +69,19 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques

 private static final int REQUEST_OVERHEAD = 50;

+private static final ParseField INDEX = new ParseField("_index");
+private static final ParseField TYPE = new ParseField("_type");
+private static final ParseField ID = new ParseField("_id");
+private static final ParseField ROUTING = new ParseField("routing");
+private static final ParseField PARENT = new ParseField("parent");
+private static final ParseField OP_TYPE = new ParseField("op_type");
+private static final ParseField VERSION = new ParseField("version");
+private static final ParseField VERSION_TYPE = new ParseField("version_type");
+private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict");
+private static final ParseField PIPELINE = new ParseField("pipeline");
+private static final ParseField FIELDS = new ParseField("fields");
+private static final ParseField SOURCE = new ParseField("_source");
+
 /**
  * Requests that are part of this request. It is only possible to add things that are both {@link ActionRequest}s and
  * {@link WriteRequest}s to this but java doesn't support syntax to declare that everything in the array has both types so we declare

@@ -334,45 +348,45 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
 } else if (token.isValue()) {
-if ("_index".equals(currentFieldName)) {
+if (INDEX.match(currentFieldName)){
 if (!allowExplicitIndex) {
 throw new IllegalArgumentException("explicit index in bulk is not allowed");
 }
 index = parser.text();
-} else if ("_type".equals(currentFieldName)) {
+} else if (TYPE.match(currentFieldName)) {
 type = parser.text();
-} else if ("_id".equals(currentFieldName)) {
+} else if (ID.match(currentFieldName)) {
 id = parser.text();
-} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
+} else if (ROUTING.match(currentFieldName)) {
 routing = parser.text();
-} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
+} else if (PARENT.match(currentFieldName)) {
 parent = parser.text();
-} else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
+} else if (OP_TYPE.match(currentFieldName)) {
 opType = parser.text();
-} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
+} else if (VERSION.match(currentFieldName)) {
 version = parser.longValue();
-} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
+} else if (VERSION_TYPE.match(currentFieldName)) {
 versionType = VersionType.fromString(parser.text());
-} else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) {
+} else if (RETRY_ON_CONFLICT.match(currentFieldName)) {
 retryOnConflict = parser.intValue();
-} else if ("pipeline".equals(currentFieldName)) {
+} else if (PIPELINE.match(currentFieldName)) {
 pipeline = parser.text();
-} else if ("fields".equals(currentFieldName)) {
+} else if (FIELDS.match(currentFieldName)) {
 throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
-} else if ("_source".equals(currentFieldName)) {
+} else if (SOURCE.match(currentFieldName)) {
 fetchSourceContext = FetchSourceContext.fromXContent(parser);
 } else {
 throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
 }
 } else if (token == XContentParser.Token.START_ARRAY) {
-if ("fields".equals(currentFieldName)) {
+if (FIELDS.match(currentFieldName)) {
 DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
 List<Object> values = parser.list();
 fields = values.toArray(new String[values.size()]);
 } else {
 throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
 }
-} else if (token == XContentParser.Token.START_OBJECT && "_source".equals(currentFieldName)) {
+} else if (token == XContentParser.Token.START_OBJECT && SOURCE.match(currentFieldName)) {
 fetchSourceContext = FetchSourceContext.fromXContent(parser);
 } else if (token != XContentParser.Token.VALUE_NULL) {
 throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
@@ -28,6 +28,7 @@ import org.elasticsearch.action.RealtimeRequest;
 import org.elasticsearch.action.ValidateActions;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;

@@ -48,6 +49,17 @@ import java.util.Locale;

 public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetRequest.Item>, CompositeIndicesRequest, RealtimeRequest {

+private static final ParseField INDEX = new ParseField("_index");
+private static final ParseField TYPE = new ParseField("_type");
+private static final ParseField ID = new ParseField("_id");
+private static final ParseField ROUTING = new ParseField("routing");
+private static final ParseField PARENT = new ParseField("parent");
+private static final ParseField VERSION = new ParseField("version");
+private static final ParseField VERSION_TYPE = new ParseField("version_type");
+private static final ParseField FIELDS = new ParseField("fields");
+private static final ParseField STORED_FIELDS = new ParseField("stored_fields");
+private static final ParseField SOURCE = new ParseField("_source");
+
 /**
  * A single get item.
  */

@@ -379,30 +391,30 @@ public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetR
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
 } else if (token.isValue()) {
-if ("_index".equals(currentFieldName)) {
+if (INDEX.match(currentFieldName)) {
 if (!allowExplicitIndex) {
 throw new IllegalArgumentException("explicit index in multi get is not allowed");
 }
 index = parser.text();
-} else if ("_type".equals(currentFieldName)) {
+} else if (TYPE.match(currentFieldName)) {
 type = parser.text();
-} else if ("_id".equals(currentFieldName)) {
+} else if (ID.match(currentFieldName)) {
 id = parser.text();
-} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
+} else if (ROUTING.match(currentFieldName)) {
 routing = parser.text();
-} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
+} else if (PARENT.match(currentFieldName)) {
 parent = parser.text();
-} else if ("fields".equals(currentFieldName)) {
+} else if (FIELDS.match(currentFieldName)) {
 throw new ParsingException(parser.getTokenLocation(),
 "Unsupported field [fields] used, expected [stored_fields] instead");
-} else if ("stored_fields".equals(currentFieldName)) {
+} else if (STORED_FIELDS.match(currentFieldName)) {
 storedFields = new ArrayList<>();
 storedFields.add(parser.text());
-} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
+} else if (VERSION.match(currentFieldName)) {
 version = parser.longValue();
-} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
+} else if (VERSION_TYPE.match(currentFieldName)) {
 versionType = VersionType.fromString(parser.text());
-} else if ("_source".equals(currentFieldName)) {
+} else if (SOURCE.match(currentFieldName)) {
 // check lenient to avoid interpreting the value as string but parse strict in order to provoke an error early on.
 if (parser.isBooleanValueLenient()) {
 fetchSourceContext = new FetchSourceContext(parser.booleanValue(), fetchSourceContext.includes(),

@@ -413,17 +425,19 @@ public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetR
 } else {
 throw new ElasticsearchParseException("illegal type for _source: [{}]", token);
 }
+} else {
+throw new ElasticsearchParseException("failed to parse multi get request. unknown field [{}]", currentFieldName);
 }
 } else if (token == XContentParser.Token.START_ARRAY) {
-if ("fields".equals(currentFieldName)) {
+if (FIELDS.match(currentFieldName)) {
 throw new ParsingException(parser.getTokenLocation(),
 "Unsupported field [fields] used, expected [stored_fields] instead");
-} else if ("stored_fields".equals(currentFieldName)) {
+} else if (STORED_FIELDS.match(currentFieldName)) {
 storedFields = new ArrayList<>();
 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
 storedFields.add(parser.text());
 }
-} else if ("_source".equals(currentFieldName)) {
+} else if (SOURCE.match(currentFieldName)) {
 ArrayList<String> includes = new ArrayList<>();
 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
 includes.add(parser.text());

@@ -433,7 +447,7 @@ public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetR
 }

 } else if (token == XContentParser.Token.START_OBJECT) {
-if ("_source".equals(currentFieldName)) {
+if (SOURCE.match(currentFieldName)) {
 List<String> currentList = null, includes = null, excludes = null;

 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -27,6 +27,7 @@ import org.elasticsearch.action.ValidateActions;
 import org.elasticsearch.action.get.MultiGetRequest;
 import org.elasticsearch.action.support.single.shard.SingleShardRequest;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;

@@ -60,6 +61,22 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 */
 public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> implements RealtimeRequest {

+private static final ParseField INDEX = new ParseField("_index");
+private static final ParseField TYPE = new ParseField("_type");
+private static final ParseField ID = new ParseField("_id");
+private static final ParseField ROUTING = new ParseField("routing");
+private static final ParseField PARENT = new ParseField("parent");
+private static final ParseField VERSION = new ParseField("version");
+private static final ParseField VERSION_TYPE = new ParseField("version_type");
+private static final ParseField FIELDS = new ParseField("fields");
+private static final ParseField OFFSETS = new ParseField("offsets");
+private static final ParseField POSITIONS = new ParseField("positions");
+private static final ParseField PAYLOADS = new ParseField("payloads");
+private static final ParseField DFS = new ParseField("dfs");
+private static final ParseField FILTER = new ParseField("filter");
+private static final ParseField DOC = new ParseField("doc");
+
+
 private String type;

 private String id;

@@ -593,7 +610,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
 } else if (currentFieldName != null) {
-if (currentFieldName.equals("fields")) {
+if (FIELDS.match(currentFieldName)) {
 if (token == XContentParser.Token.START_ARRAY) {
 while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
 fields.add(parser.text());

@@ -601,43 +618,43 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
 } else {
 throw new ElasticsearchParseException("failed to parse term vectors request. field [fields] must be an array");
 }
-} else if (currentFieldName.equals("offsets")) {
+} else if (OFFSETS.match(currentFieldName)) {
 termVectorsRequest.offsets(parser.booleanValue());
-} else if (currentFieldName.equals("positions")) {
+} else if (POSITIONS.match(currentFieldName)) {
 termVectorsRequest.positions(parser.booleanValue());
-} else if (currentFieldName.equals("payloads")) {
+} else if (PAYLOADS.match(currentFieldName)) {
 termVectorsRequest.payloads(parser.booleanValue());
 } else if (currentFieldName.equals("term_statistics") || currentFieldName.equals("termStatistics")) {
 termVectorsRequest.termStatistics(parser.booleanValue());
 } else if (currentFieldName.equals("field_statistics") || currentFieldName.equals("fieldStatistics")) {
 termVectorsRequest.fieldStatistics(parser.booleanValue());
-} else if (currentFieldName.equals("dfs")) {
+} else if (DFS.match(currentFieldName)) {
 throw new IllegalArgumentException("distributed frequencies is not supported anymore for term vectors");
 } else if (currentFieldName.equals("per_field_analyzer") || currentFieldName.equals("perFieldAnalyzer")) {
 termVectorsRequest.perFieldAnalyzer(readPerFieldAnalyzer(parser.map()));
-} else if (currentFieldName.equals("filter")) {
+} else if (FILTER.match(currentFieldName)) {
 termVectorsRequest.filterSettings(readFilterSettings(parser));
-} else if ("_index".equals(currentFieldName)) { // the following is important for multi request parsing.
+} else if (INDEX.match(currentFieldName)) { // the following is important for multi request parsing.
 termVectorsRequest.index = parser.text();
-} else if ("_type".equals(currentFieldName)) {
+} else if (TYPE.match(currentFieldName)) {
 termVectorsRequest.type = parser.text();
-} else if ("_id".equals(currentFieldName)) {
+} else if (ID.match(currentFieldName)) {
 if (termVectorsRequest.doc != null) {
 throw new ElasticsearchParseException("failed to parse term vectors request. either [id] or [doc] can be specified, but not both!");
 }
 termVectorsRequest.id = parser.text();
-} else if ("doc".equals(currentFieldName)) {
+} else if (DOC.match(currentFieldName)) {
 if (termVectorsRequest.id != null) {
 throw new ElasticsearchParseException("failed to parse term vectors request. either [id] or [doc] can be specified, but not both!");
 }
 termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
-} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
+} else if (ROUTING.match(currentFieldName)) {
 termVectorsRequest.routing = parser.text();
-} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
+} else if (PARENT.match(currentFieldName)) {
 termVectorsRequest.parent = parser.text();
-} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
+} else if (VERSION.match(currentFieldName)) {
 termVectorsRequest.version = parser.longValue();
-} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
+} else if (VERSION_TYPE.match(currentFieldName)) {
 termVectorsRequest.versionType = VersionType.fromString(parser.text());
 } else {
 throw new ElasticsearchParseException("failed to parse term vectors request. unknown field [{}]", currentFieldName);
@@ -92,23 +92,31 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 private static final Set<Class<? extends MappedFieldType>> SUPPORTED_FIELD_TYPES = new HashSet<>(
 Arrays.asList(TextFieldType.class, KeywordFieldType.class));

-private interface Field {
-ParseField FIELDS = new ParseField("fields");
-ParseField LIKE = new ParseField("like");
-ParseField UNLIKE = new ParseField("unlike");
-ParseField MAX_QUERY_TERMS = new ParseField("max_query_terms");
-ParseField MIN_TERM_FREQ = new ParseField("min_term_freq");
-ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq");
-ParseField MAX_DOC_FREQ = new ParseField("max_doc_freq");
-ParseField MIN_WORD_LENGTH = new ParseField("min_word_length");
-ParseField MAX_WORD_LENGTH = new ParseField("max_word_length");
-ParseField STOP_WORDS = new ParseField("stop_words");
-ParseField ANALYZER = new ParseField("analyzer");
-ParseField MINIMUM_SHOULD_MATCH = new ParseField("minimum_should_match");
-ParseField BOOST_TERMS = new ParseField("boost_terms");
-ParseField INCLUDE = new ParseField("include");
-ParseField FAIL_ON_UNSUPPORTED_FIELD = new ParseField("fail_on_unsupported_field");
-}
+private static final ParseField FIELDS = new ParseField("fields");
+private static final ParseField LIKE = new ParseField("like");
+private static final ParseField UNLIKE = new ParseField("unlike");
+private static final ParseField MAX_QUERY_TERMS = new ParseField("max_query_terms");
+private static final ParseField MIN_TERM_FREQ = new ParseField("min_term_freq");
+private static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq");
+private static final ParseField MAX_DOC_FREQ = new ParseField("max_doc_freq");
+private static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length");
+private static final ParseField MAX_WORD_LENGTH = new ParseField("max_word_length");
+private static final ParseField STOP_WORDS = new ParseField("stop_words");
+private static final ParseField ANALYZER = new ParseField("analyzer");
+private static final ParseField MINIMUM_SHOULD_MATCH = new ParseField("minimum_should_match");
+private static final ParseField BOOST_TERMS = new ParseField("boost_terms");
+private static final ParseField INCLUDE = new ParseField("include");
+private static final ParseField FAIL_ON_UNSUPPORTED_FIELD = new ParseField("fail_on_unsupported_field");
+
+private static final ParseField INDEX = new ParseField("_index");
+private static final ParseField TYPE = new ParseField("_type");
+private static final ParseField ID = new ParseField("_id");
+public static final ParseField DOC = new ParseField("doc");
+private static final ParseField PER_FIELD_ANALYZER = new ParseField("per_field_analyzer");
+private static final ParseField ROUTING = new ParseField("routing");
+private static final ParseField VERSION = new ParseField("version");
+private static final ParseField VERSION_TYPE = new ParseField("version_type");
+

 // document inputs
 private final String[] fields;

@@ -141,18 +149,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 public static final class Item implements ToXContentObject, Writeable {
 public static final Item[] EMPTY_ARRAY = new Item[0];

-public interface Field {
-ParseField INDEX = new ParseField("_index");
-ParseField TYPE = new ParseField("_type");
-ParseField ID = new ParseField("_id");
-ParseField DOC = new ParseField("doc");
-ParseField FIELDS = new ParseField("fields");
-ParseField PER_FIELD_ANALYZER = new ParseField("per_field_analyzer");
-ParseField ROUTING = new ParseField("_routing");
-ParseField VERSION = new ParseField("_version");
-ParseField VERSION_TYPE = new ParseField("_version_type");
-}
-
 private String index;
 private String type;
 private String id;

@@ -370,16 +366,16 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
 } else if (currentFieldName != null) {
-if (Field.INDEX.match(currentFieldName)) {
+if (INDEX.match(currentFieldName)) {
 item.index = parser.text();
-} else if (Field.TYPE.match(currentFieldName)) {
+} else if (TYPE.match(currentFieldName)) {
 item.type = parser.text();
-} else if (Field.ID.match(currentFieldName)) {
+} else if (ID.match(currentFieldName)) {
 item.id = parser.text();
-} else if (Field.DOC.match(currentFieldName)) {
+} else if (DOC.match(currentFieldName)) {
 item.doc = jsonBuilder().copyCurrentStructure(parser).bytes();
 item.xContentType = XContentType.JSON;
-} else if (Field.FIELDS.match(currentFieldName)) {
+} else if (FIELDS.match(currentFieldName)) {
 if (token == XContentParser.Token.START_ARRAY) {
 List<String> fields = new ArrayList<>();
 while (parser.nextToken() != XContentParser.Token.END_ARRAY) {

@@ -390,14 +386,13 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 throw new ElasticsearchParseException(
 "failed to parse More Like This item. field [fields] must be an array");
 }
-} else if (Field.PER_FIELD_ANALYZER.match(currentFieldName)) {
+} else if (PER_FIELD_ANALYZER.match(currentFieldName)) {
 item.perFieldAnalyzer(TermVectorsRequest.readPerFieldAnalyzer(parser.map()));
-} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
+} else if (ROUTING.match(currentFieldName)) {
 item.routing = parser.text();
-} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
+} else if (VERSION.match(currentFieldName)) {
 item.version = parser.longValue();
-} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName)
-|| "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
+} else if (VERSION_TYPE.match(currentFieldName)) {
 item.versionType = VersionType.fromString(parser.text());
 } else {
 throw new ElasticsearchParseException(

@@ -420,31 +415,31 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
 if (this.index != null) {
-builder.field(Field.INDEX.getPreferredName(), this.index);
+builder.field(INDEX.getPreferredName(), this.index);
 }
 if (this.type != null) {
-builder.field(Field.TYPE.getPreferredName(), this.type);
+builder.field(TYPE.getPreferredName(), this.type);
 }
 if (this.id != null) {
-builder.field(Field.ID.getPreferredName(), this.id);
+builder.field(ID.getPreferredName(), this.id);
 }
 if (this.doc != null) {
-builder.rawField(Field.DOC.getPreferredName(), this.doc, xContentType);
+builder.rawField(DOC.getPreferredName(), this.doc, xContentType);
 }
 if (this.fields != null) {
-builder.array(Field.FIELDS.getPreferredName(), this.fields);
+builder.array(FIELDS.getPreferredName(), this.fields);
 }
 if (this.perFieldAnalyzer != null) {
-builder.field(Field.PER_FIELD_ANALYZER.getPreferredName(), this.perFieldAnalyzer);
+builder.field(PER_FIELD_ANALYZER.getPreferredName(), this.perFieldAnalyzer);
 }
 if (this.routing != null) {
-builder.field(Field.ROUTING.getPreferredName(), this.routing);
+builder.field(ROUTING.getPreferredName(), this.routing);
 }
 if (this.version != Versions.MATCH_ANY) {
-builder.field(Field.VERSION.getPreferredName(), this.version);
+builder.field(VERSION.getPreferredName(), this.version);
 }
 if (this.versionType != VersionType.INTERNAL) {
-builder.field(Field.VERSION_TYPE.getPreferredName(), this.versionType.toString().toLowerCase(Locale.ROOT));
+builder.field(VERSION_TYPE.getPreferredName(), this.versionType.toString().toLowerCase(Locale.ROOT));
 }
 return builder.endObject();
 }

@@ -781,26 +776,26 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 protected void doXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject(NAME);
 if (fields != null) {
-builder.array(Field.FIELDS.getPreferredName(), fields);
+builder.array(FIELDS.getPreferredName(), fields);
 }
-buildLikeField(builder, Field.LIKE.getPreferredName(), likeTexts, likeItems);
+buildLikeField(builder, LIKE.getPreferredName(), likeTexts, likeItems);
-buildLikeField(builder, Field.UNLIKE.getPreferredName(), unlikeTexts, unlikeItems);
+buildLikeField(builder, UNLIKE.getPreferredName(), unlikeTexts, unlikeItems);
-builder.field(Field.MAX_QUERY_TERMS.getPreferredName(), maxQueryTerms);
+builder.field(MAX_QUERY_TERMS.getPreferredName(), maxQueryTerms);
-builder.field(Field.MIN_TERM_FREQ.getPreferredName(), minTermFreq);
+builder.field(MIN_TERM_FREQ.getPreferredName(), minTermFreq);
-builder.field(Field.MIN_DOC_FREQ.getPreferredName(), minDocFreq);
+builder.field(MIN_DOC_FREQ.getPreferredName(), minDocFreq);
-builder.field(Field.MAX_DOC_FREQ.getPreferredName(), maxDocFreq);
+builder.field(MAX_DOC_FREQ.getPreferredName(), maxDocFreq);
-builder.field(Field.MIN_WORD_LENGTH.getPreferredName(), minWordLength);
+builder.field(MIN_WORD_LENGTH.getPreferredName(), minWordLength);
-builder.field(Field.MAX_WORD_LENGTH.getPreferredName(), maxWordLength);
+builder.field(MAX_WORD_LENGTH.getPreferredName(), maxWordLength);
 if (stopWords != null) {
-builder.array(Field.STOP_WORDS.getPreferredName(), stopWords);
+builder.array(STOP_WORDS.getPreferredName(), stopWords);
 }
 if (analyzer != null) {
-builder.field(Field.ANALYZER.getPreferredName(), analyzer);
+builder.field(ANALYZER.getPreferredName(), analyzer);
 }
-builder.field(Field.MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch);
+builder.field(MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch);
-builder.field(Field.BOOST_TERMS.getPreferredName(), boostTerms);
+builder.field(BOOST_TERMS.getPreferredName(), boostTerms);
-builder.field(Field.INCLUDE.getPreferredName(), include);
+builder.field(INCLUDE.getPreferredName(), include);
-builder.field(Field.FAIL_ON_UNSUPPORTED_FIELD.getPreferredName(), failOnUnsupportedField);
+builder.field(FAIL_ON_UNSUPPORTED_FIELD.getPreferredName(), failOnUnsupportedField);
 printBoostAndQueryName(builder);
 builder.endObject();
 }

@@ -839,31 +834,31 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
 } else if (token.isValue()) {
-if (Field.LIKE.match(currentFieldName)) {
+if (LIKE.match(currentFieldName)) {
 parseLikeField(parser, likeTexts, likeItems);
-} else if (Field.UNLIKE.match(currentFieldName)) {
+} else if (UNLIKE.match(currentFieldName)) {
 parseLikeField(parser, unlikeTexts, unlikeItems);
-} else if (Field.MAX_QUERY_TERMS.match(currentFieldName)) {
+} else if (MAX_QUERY_TERMS.match(currentFieldName)) {
 maxQueryTerms = parser.intValue();
-} else if (Field.MIN_TERM_FREQ.match(currentFieldName)) {
+} else if (MIN_TERM_FREQ.match(currentFieldName)) {
 minTermFreq =parser.intValue();
-} else if (Field.MIN_DOC_FREQ.match(currentFieldName)) {
+} else if (MIN_DOC_FREQ.match(currentFieldName)) {
 minDocFreq = parser.intValue();
-} else if (Field.MAX_DOC_FREQ.match(currentFieldName)) {
+} else if (MAX_DOC_FREQ.match(currentFieldName)) {
 maxDocFreq = parser.intValue();
-} else if (Field.MIN_WORD_LENGTH.match(currentFieldName)) {
+} else if (MIN_WORD_LENGTH.match(currentFieldName)) {
 minWordLength = parser.intValue();
-} else if (Field.MAX_WORD_LENGTH.match(currentFieldName)) {
+} else if (MAX_WORD_LENGTH.match(currentFieldName)) {
 maxWordLength = parser.intValue();
-} else if (Field.ANALYZER.match(currentFieldName)) {
+} else if (ANALYZER.match(currentFieldName)) {
 analyzer = parser.text();
-} else if (Field.MINIMUM_SHOULD_MATCH.match(currentFieldName)) {
+} else if (MINIMUM_SHOULD_MATCH.match(currentFieldName)) {
 minimumShouldMatch = parser.text();
-} else if (Field.BOOST_TERMS.match(currentFieldName)) {
+} else if (BOOST_TERMS.match(currentFieldName)) {
 boostTerms = parser.floatValue();
-} else if (Field.INCLUDE.match(currentFieldName)) {
+} else if (INCLUDE.match(currentFieldName)) {
 include = parser.booleanValue();
-} else if (Field.FAIL_ON_UNSUPPORTED_FIELD.match(currentFieldName)) {
+} else if (FAIL_ON_UNSUPPORTED_FIELD.match(currentFieldName)) {
 failOnUnsupportedField = parser.booleanValue();
 } else if ("boost".equals(currentFieldName)) {
 boost = parser.floatValue();

@@ -873,20 +868,20 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
 }
 } else if (token == XContentParser.Token.START_ARRAY) {
-if (Field.FIELDS.match(currentFieldName)) {
+if (FIELDS.match(currentFieldName)) {
 fields = new ArrayList<>();
 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
 fields.add(parser.text());
 }
-} else if (Field.LIKE.match(currentFieldName)) {
+} else if (LIKE.match(currentFieldName)) {
 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
 parseLikeField(parser, likeTexts, likeItems);
 }
-} else if (Field.UNLIKE.match(currentFieldName)) {
+} else if (UNLIKE.match(currentFieldName)) {
 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
 parseLikeField(parser, unlikeTexts, unlikeItems);
 }
-} else if (Field.STOP_WORDS.match(currentFieldName)) {
+} else if (STOP_WORDS.match(currentFieldName)) {
 stopWords = new ArrayList<>();
 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
 stopWords.add(parser.text());

@@ -895,9 +890,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
 throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
 }
 } else if (token == XContentParser.Token.START_OBJECT) {
-if (Field.LIKE.match(currentFieldName)) {
+if (LIKE.match(currentFieldName)) {
 parseLikeField(parser, likeTexts, likeItems);
-} else if (Field.UNLIKE.match(currentFieldName)) {
+} else if (UNLIKE.match(currentFieldName)) {
 parseLikeField(parser, unlikeTexts, unlikeItems);
 } else {
 throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
@@ -292,7 +292,7 @@ public class BulkRequestTests extends ESTestCase {
 builder.field("_index", "index");
 builder.field("_type", "type");
 builder.field("_id", "id");
-builder.field("_version", 1L);
+builder.field("version", 1L);
 builder.endObject();
 builder.endObject();
 }

@@ -301,7 +301,7 @@ public class BulkRequestTests extends ESTestCase {
 builder.startObject();
 builder.field("doc", "{}");
 Map<String,Object> values = new HashMap<>();
-values.put("_version", 2L);
+values.put("version", 2L);
 values.put("_index", "index");
 values.put("_type", "type");
 builder.field("upsert", values);

@@ -67,7 +67,7 @@ import static org.hamcrest.Matchers.instanceOf;

 public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> {

-private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[]{Item.Field.DOC.getPreferredName()};
+private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[]{MoreLikeThisQueryBuilder.DOC.getPreferredName()};

 private static String[] randomFields;
 private static Item[] randomLikeItems;

@@ -222,7 +222,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
 @Override
 protected Set<String> getObjectsHoldingArbitraryContent() {
 //doc contains arbitrary content, anything can be added to it and no exception will be thrown
-return Collections.singleton(MoreLikeThisQueryBuilder.Item.Field.DOC.getPreferredName());
+return Collections.singleton(MoreLikeThisQueryBuilder.DOC.getPreferredName());
 }

 @Override
@@ -1,4 +1,4 @@
-{ "update" : {"_id" : "1", "_retry_on_conflict" : 2} }
+{ "update" : {"_id" : "1", "retry_on_conflict" : 2} }
 { "doc" : {"field" : "value"} }
 { "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1" } }
 { "script" : { "source" : "counter += param1", "lang" : "javascript", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}}
@@ -201,16 +201,16 @@ chunks, as this will slow things down.
 === Versioning

 Each bulk item can include the version value using the
-`_version`/`version` field. It automatically follows the behavior of the
+`version` field. It automatically follows the behavior of the
 index / delete operation based on the `_version` mapping. It also
-support the `version_type`/`_version_type` (see <<index-versioning, versioning>>)
+support the `version_type` (see <<index-versioning, versioning>>)

 [float]
 [[bulk-routing]]
 === Routing

 Each bulk item can include the routing value using the
-`_routing`/`routing` field. It automatically follows the behavior of the
+`routing` field. It automatically follows the behavior of the
 index / delete operation based on the `_routing` mapping.

 [float]

@@ -234,7 +234,7 @@ Control when the changes made by this request are visible to search. See
 [[bulk-update]]
 === Update

-When using `update` action `_retry_on_conflict` can be used as field in
+When using `update` action `retry_on_conflict` can be used as field in
 the action itself (not in the extra payload line), to specify how many
 times an update should be retried in the case of a version conflict.

@@ -246,11 +246,11 @@ the options. Example with update actions:
 [source,js]
 --------------------------------------------------
 POST _bulk
-{ "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
+{ "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "retry_on_conflict" : 3} }
 { "doc" : {"field" : "value"} }
-{ "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
+{ "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "retry_on_conflict" : 3} }
 { "script" : { "source": "ctx._source.counter += params.param1", "lang" : "painless", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}}
-{ "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
+{ "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "retry_on_conflict" : 3} }
 { "doc" : {"field" : "value"}, "doc_as_upsert" : true }
 { "update" : {"_id" : "3", "_type" : "type1", "_index" : "index1", "_source" : true} }
 { "doc" : {"field" : "value"} }

@@ -230,7 +230,7 @@ GET /_mget?routing=key1
 "_index" : "test",
 "_type" : "type",
 "_id" : "1",
-"_routing" : "key2"
+"routing" : "key2"
 },
 {
 "_index" : "test",
@@ -30,6 +30,8 @@ way to reindex old indices is to use the `reindex` API.
 * <<breaking_70_mappings_changes>>
 * <<breaking_70_search_changes>>
 * <<breaking_70_plugins_changes>>
+* <<breaking_70_api_changes>>
+

 include::migrate_7_0/aggregations.asciidoc[]
 include::migrate_7_0/cluster.asciidoc[]

@@ -37,3 +39,5 @@ include::migrate_7_0/indices.asciidoc[]
 include::migrate_7_0/mappings.asciidoc[]
 include::migrate_7_0/search.asciidoc[]
 include::migrate_7_0/plugins.asciidoc[]
+include::migrate_7_0/api.asciidoc[]
+
@@ -0,0 +1,24 @@
+[[breaking_70_api_changes]]
+=== Breaking changes in 7.0
+
+==== Camel case and underscore parameters deprecated in 6.x have been removed
+
+A number of duplicate parameters deprecated in 6.x have been removed from
+Bulk request, Multi Get request, Term Vectors request, and More Like This Query
+requests.
+
+The following camel case parameters have been removed:
+
+* `opType`
+* `versionType`, `_versionType`
+
+The following parameters starting with underscore have been removed:
+
+* `_parent`
+* `_retry_on_conflict`
+* `_routing`
+* `_version`
+* `_version_type`
+
+Instead of these removed parameters, use their non camel case equivalents without
+starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`.
@@ -0,0 +1,26 @@
+
+---
+"Deprecated parameters should fail in Bulk query":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: some parameters are removed starting from 7.0, their equivalents without underscore are used instead
+      features: "warnings"
+
+  - do:
+      catch: bad_request
+      bulk:
+        body: |
+          { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_version": 1 } }
+          { "doc": { "f1": "v1" } }
+          { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2", "_version": 1 } }
+          { "doc": { "f1": "v2" } }
+
+  - do:
+      catch: bad_request
+      bulk:
+        body: |
+          { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_routing": "test1" } }
+          { "doc": { "f1": "v1" } }
+          { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2", "_routing": "test1" } }
+          { "doc": { "f1": "v2" } }
@@ -0,0 +1,38 @@
+
+---
+"Deprecated parameters should fail in Multi Get query":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: _version, _routing are removed starting from 7.0, their equivalents without underscore are used instead
+      features: "warnings"
+
+  - do:
+      index:
+        index: test_1
+        type: test
+        id: 1
+        body: { foo: bar }
+
+  - do:
+      index:
+        index: test_1
+        type: test
+        id: 2
+        body: { foo: baz }
+
+  - do:
+      catch: bad_request
+      mget:
+        body:
+          docs:
+            - { _index: test_1, _type: test, _id: 1, _routing : test1 }
+            - { _index: test_1, _type: test, _id: 2, _routing : test1 }
+
+  - do:
+      catch: bad_request
+      mget:
+        body:
+          docs:
+            - { _index: test_1, _type: test, _id: 1, _version : 1 }
+            - { _index: test_1, _type: test, _id: 2, _version : 1 }
@@ -0,0 +1,52 @@
+
+---
+"Deprecated camel case and _ parameters should fail in Term Vectors query":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: camel case and _ parameters (e.g. versionType, _version_type) should fail from 7.0
+      features: "warnings"
+
+  - do:
+      indices.create:
+        index: testidx
+        body:
+          mappings:
+            testtype:
+              properties:
+                text:
+                  type : "text"
+                  term_vector : "with_positions_offsets"
+
+  - do:
+      index:
+        index: testidx
+        type: testtype
+        id: testing_document
+        body: {"text" : "The quick brown fox is brown."}
+
+  - do:
+      catch: bad_request
+      mtermvectors:
+        "term_statistics" : true
+        "body" :
+          "docs":
+            -
+              "_index" : "testidx"
+              "_type" : "testtype"
+              "_id" : "testing_document"
+              "version" : 1
+              "versionType" : "external"
+
+  - do:
+      catch: bad_request
+      mtermvectors:
+        "term_statistics" : true
+        "body" :
+          "docs":
+            -
+              "_index" : "testidx"
+              "_type" : "testtype"
+              "_id" : "testing_document"
+              "version" : 1
+              "_version_type" : "external"