Standardize underscore requirements in parameters (#27414)

Standardize underscore requirements in parameters across different types of
requests:
_index, _type, _source, _id keep their underscores
params like version and retry_on_conflict are used without underscores
Throw an error if the old forms of these parameters are used

BulkRequest, MultiGetRequest, TermVectorsRequest, MoreLikeThisQuery
were changed

Closes #26886
Mayya Sharipova 2017-11-17 15:31:52 -05:00 committed by GitHub
parent a5df2ef538
commit 858b2c7cb8
15 changed files with 328 additions and 144 deletions
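
At the heart of the change is the parsing pattern visible in the diffs below: ad-hoc string comparisons that accepted several spellings (`_version`, `version`, `versionType`, ...) are replaced with `ParseField` constants, so only the canonical name is accepted and anything else falls through to an error. The following is a minimal sketch of that pattern, assuming the `ParseField`, `XContentParser`, and single-argument `match()` idioms shown in the diffs; the wrapper class and helper method are hypothetical.

[source,java]
--------------------------------------------------
// Minimal sketch of the strict-parsing pattern applied throughout this commit.
// ParseField, XContentParser, and match() are used as in the diffs below;
// the wrapper class and helper method are hypothetical.
import java.io.IOException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;

class StrictParameterParsingSketch {
    private static final ParseField VERSION = new ParseField("version");
    private static final ParseField VERSION_TYPE = new ParseField("version_type");
    private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict");

    static void parseParameter(String currentFieldName, XContentParser parser) throws IOException {
        if (VERSION.match(currentFieldName)) {
            long version = parser.longValue();            // only "version" is accepted
        } else if (VERSION_TYPE.match(currentFieldName)) {
            String versionType = parser.text();           // only "version_type" is accepted
        } else if (RETRY_ON_CONFLICT.match(currentFieldName)) {
            int retryOnConflict = parser.intValue();      // only "retry_on_conflict" is accepted
        } else {
            // "_version", "versionType", "_retry_on_conflict", ... all end up here now
            throw new IllegalArgumentException("unknown parameter [" + currentFieldName + "]");
        }
    }
}
--------------------------------------------------

Centralizing each name in a `ParseField` constant keeps the canonical spelling in one place, so the removed underscore and camel-case variants are rejected uniformly instead of being special-cased in every parser.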


@ -191,23 +191,23 @@ public final class Request {
metadata.field("_id", request.id());
}
if (Strings.hasLength(request.routing())) {
metadata.field("_routing", request.routing());
metadata.field("routing", request.routing());
}
if (Strings.hasLength(request.parent())) {
metadata.field("_parent", request.parent());
metadata.field("parent", request.parent());
}
if (request.version() != Versions.MATCH_ANY) {
metadata.field("_version", request.version());
metadata.field("version", request.version());
}
VersionType versionType = request.versionType();
if (versionType != VersionType.INTERNAL) {
if (versionType == VersionType.EXTERNAL) {
metadata.field("_version_type", "external");
metadata.field("version_type", "external");
} else if (versionType == VersionType.EXTERNAL_GTE) {
metadata.field("_version_type", "external_gte");
metadata.field("version_type", "external_gte");
} else if (versionType == VersionType.FORCE) {
metadata.field("_version_type", "force");
metadata.field("version_type", "force");
}
}
@ -219,7 +219,7 @@ public final class Request {
} else if (opType == DocWriteRequest.OpType.UPDATE) {
UpdateRequest updateRequest = (UpdateRequest) request;
if (updateRequest.retryOnConflict() > 0) {
metadata.field("_retry_on_conflict", updateRequest.retryOnConflict());
metadata.field("retry_on_conflict", updateRequest.retryOnConflict());
}
if (updateRequest.fetchSource() != null) {
metadata.field("_source", updateRequest.fetchSource());


@ -30,6 +30,7 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -68,6 +69,19 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
private static final int REQUEST_OVERHEAD = 50;
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField PARENT = new ParseField("parent");
private static final ParseField OP_TYPE = new ParseField("op_type");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict");
private static final ParseField PIPELINE = new ParseField("pipeline");
private static final ParseField FIELDS = new ParseField("fields");
private static final ParseField SOURCE = new ParseField("_source");
/**
* Requests that are part of this request. It is only possible to add things that are both {@link ActionRequest}s and
* {@link WriteRequest}s to this but java doesn't support syntax to declare that everything in the array has both types so we declare
@ -334,45 +348,45 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("_index".equals(currentFieldName)) {
if (INDEX.match(currentFieldName)){
if (!allowExplicitIndex) {
throw new IllegalArgumentException("explicit index in bulk is not allowed");
}
index = parser.text();
} else if ("_type".equals(currentFieldName)) {
} else if (TYPE.match(currentFieldName)) {
type = parser.text();
} else if ("_id".equals(currentFieldName)) {
} else if (ID.match(currentFieldName)) {
id = parser.text();
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
} else if (ROUTING.match(currentFieldName)) {
routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
} else if (PARENT.match(currentFieldName)) {
parent = parser.text();
} else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) {
} else if (OP_TYPE.match(currentFieldName)) {
opType = parser.text();
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
} else if (VERSION.match(currentFieldName)) {
version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
} else if (VERSION_TYPE.match(currentFieldName)) {
versionType = VersionType.fromString(parser.text());
} else if ("_retry_on_conflict".equals(currentFieldName) || "_retryOnConflict".equals(currentFieldName)) {
} else if (RETRY_ON_CONFLICT.match(currentFieldName)) {
retryOnConflict = parser.intValue();
} else if ("pipeline".equals(currentFieldName)) {
} else if (PIPELINE.match(currentFieldName)) {
pipeline = parser.text();
} else if ("fields".equals(currentFieldName)) {
} else if (FIELDS.match(currentFieldName)) {
throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
} else if ("_source".equals(currentFieldName)) {
} else if (SOURCE.match(currentFieldName)) {
fetchSourceContext = FetchSourceContext.fromXContent(parser);
} else {
throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("fields".equals(currentFieldName)) {
if (FIELDS.match(currentFieldName)) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
List<Object> values = parser.list();
fields = values.toArray(new String[values.size()]);
} else {
throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
}
} else if (token == XContentParser.Token.START_OBJECT && "_source".equals(currentFieldName)) {
} else if (token == XContentParser.Token.START_OBJECT && SOURCE.match(currentFieldName)) {
fetchSourceContext = FetchSourceContext.fromXContent(parser);
} else if (token != XContentParser.Token.VALUE_NULL) {
throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");


@ -28,6 +28,7 @@ import org.elasticsearch.action.RealtimeRequest;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@ -48,6 +49,17 @@ import java.util.Locale;
public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetRequest.Item>, CompositeIndicesRequest, RealtimeRequest {
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField PARENT = new ParseField("parent");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
private static final ParseField FIELDS = new ParseField("fields");
private static final ParseField STORED_FIELDS = new ParseField("stored_fields");
private static final ParseField SOURCE = new ParseField("_source");
/**
* A single get item.
*/
@ -379,30 +391,30 @@ public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetR
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("_index".equals(currentFieldName)) {
if (INDEX.match(currentFieldName)) {
if (!allowExplicitIndex) {
throw new IllegalArgumentException("explicit index in multi get is not allowed");
}
index = parser.text();
} else if ("_type".equals(currentFieldName)) {
} else if (TYPE.match(currentFieldName)) {
type = parser.text();
} else if ("_id".equals(currentFieldName)) {
} else if (ID.match(currentFieldName)) {
id = parser.text();
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
} else if (ROUTING.match(currentFieldName)) {
routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
} else if (PARENT.match(currentFieldName)) {
parent = parser.text();
} else if ("fields".equals(currentFieldName)) {
} else if (FIELDS.match(currentFieldName)) {
throw new ParsingException(parser.getTokenLocation(),
"Unsupported field [fields] used, expected [stored_fields] instead");
} else if ("stored_fields".equals(currentFieldName)) {
} else if (STORED_FIELDS.match(currentFieldName)) {
storedFields = new ArrayList<>();
storedFields.add(parser.text());
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
} else if (VERSION.match(currentFieldName)) {
version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
} else if (VERSION_TYPE.match(currentFieldName)) {
versionType = VersionType.fromString(parser.text());
} else if ("_source".equals(currentFieldName)) {
} else if (SOURCE.match(currentFieldName)) {
// check lenient to avoid interpreting the value as string but parse strict in order to provoke an error early on.
if (parser.isBooleanValueLenient()) {
fetchSourceContext = new FetchSourceContext(parser.booleanValue(), fetchSourceContext.includes(),
@ -413,17 +425,19 @@ public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetR
} else {
throw new ElasticsearchParseException("illegal type for _source: [{}]", token);
}
} else {
throw new ElasticsearchParseException("failed to parse multi get request. unknown field [{}]", currentFieldName);
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("fields".equals(currentFieldName)) {
if (FIELDS.match(currentFieldName)) {
throw new ParsingException(parser.getTokenLocation(),
"Unsupported field [fields] used, expected [stored_fields] instead");
} else if ("stored_fields".equals(currentFieldName)) {
} else if (STORED_FIELDS.match(currentFieldName)) {
storedFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
storedFields.add(parser.text());
}
} else if ("_source".equals(currentFieldName)) {
} else if (SOURCE.match(currentFieldName)) {
ArrayList<String> includes = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
includes.add(parser.text());
@ -433,7 +447,7 @@ public class MultiGetRequest extends ActionRequest implements Iterable<MultiGetR
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("_source".equals(currentFieldName)) {
if (SOURCE.match(currentFieldName)) {
List<String> currentList = null, includes = null, excludes = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {


@ -27,6 +27,7 @@ import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@ -60,6 +61,22 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
*/
public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> implements RealtimeRequest {
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField PARENT = new ParseField("parent");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
private static final ParseField FIELDS = new ParseField("fields");
private static final ParseField OFFSETS = new ParseField("offsets");
private static final ParseField POSITIONS = new ParseField("positions");
private static final ParseField PAYLOADS = new ParseField("payloads");
private static final ParseField DFS = new ParseField("dfs");
private static final ParseField FILTER = new ParseField("filter");
private static final ParseField DOC = new ParseField("doc");
private String type;
private String id;
@ -593,7 +610,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (currentFieldName != null) {
if (currentFieldName.equals("fields")) {
if (FIELDS.match(currentFieldName)) {
if (token == XContentParser.Token.START_ARRAY) {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
fields.add(parser.text());
@ -601,43 +618,43 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
} else {
throw new ElasticsearchParseException("failed to parse term vectors request. field [fields] must be an array");
}
} else if (currentFieldName.equals("offsets")) {
} else if (OFFSETS.match(currentFieldName)) {
termVectorsRequest.offsets(parser.booleanValue());
} else if (currentFieldName.equals("positions")) {
} else if (POSITIONS.match(currentFieldName)) {
termVectorsRequest.positions(parser.booleanValue());
} else if (currentFieldName.equals("payloads")) {
} else if (PAYLOADS.match(currentFieldName)) {
termVectorsRequest.payloads(parser.booleanValue());
} else if (currentFieldName.equals("term_statistics") || currentFieldName.equals("termStatistics")) {
termVectorsRequest.termStatistics(parser.booleanValue());
} else if (currentFieldName.equals("field_statistics") || currentFieldName.equals("fieldStatistics")) {
termVectorsRequest.fieldStatistics(parser.booleanValue());
} else if (currentFieldName.equals("dfs")) {
} else if (DFS.match(currentFieldName)) {
throw new IllegalArgumentException("distributed frequencies is not supported anymore for term vectors");
} else if (currentFieldName.equals("per_field_analyzer") || currentFieldName.equals("perFieldAnalyzer")) {
termVectorsRequest.perFieldAnalyzer(readPerFieldAnalyzer(parser.map()));
} else if (currentFieldName.equals("filter")) {
} else if (FILTER.match(currentFieldName)) {
termVectorsRequest.filterSettings(readFilterSettings(parser));
} else if ("_index".equals(currentFieldName)) { // the following is important for multi request parsing.
} else if (INDEX.match(currentFieldName)) { // the following is important for multi request parsing.
termVectorsRequest.index = parser.text();
} else if ("_type".equals(currentFieldName)) {
} else if (TYPE.match(currentFieldName)) {
termVectorsRequest.type = parser.text();
} else if ("_id".equals(currentFieldName)) {
} else if (ID.match(currentFieldName)) {
if (termVectorsRequest.doc != null) {
throw new ElasticsearchParseException("failed to parse term vectors request. either [id] or [doc] can be specified, but not both!");
}
termVectorsRequest.id = parser.text();
} else if ("doc".equals(currentFieldName)) {
} else if (DOC.match(currentFieldName)) {
if (termVectorsRequest.id != null) {
throw new ElasticsearchParseException("failed to parse term vectors request. either [id] or [doc] can be specified, but not both!");
}
termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
} else if (ROUTING.match(currentFieldName)) {
termVectorsRequest.routing = parser.text();
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
} else if (PARENT.match(currentFieldName)) {
termVectorsRequest.parent = parser.text();
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
} else if (VERSION.match(currentFieldName)) {
termVectorsRequest.version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
} else if (VERSION_TYPE.match(currentFieldName)) {
termVectorsRequest.versionType = VersionType.fromString(parser.text());
} else {
throw new ElasticsearchParseException("failed to parse term vectors request. unknown field [{}]", currentFieldName);


@ -92,23 +92,31 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
private static final Set<Class<? extends MappedFieldType>> SUPPORTED_FIELD_TYPES = new HashSet<>(
Arrays.asList(TextFieldType.class, KeywordFieldType.class));
private interface Field {
ParseField FIELDS = new ParseField("fields");
ParseField LIKE = new ParseField("like");
ParseField UNLIKE = new ParseField("unlike");
ParseField MAX_QUERY_TERMS = new ParseField("max_query_terms");
ParseField MIN_TERM_FREQ = new ParseField("min_term_freq");
ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq");
ParseField MAX_DOC_FREQ = new ParseField("max_doc_freq");
ParseField MIN_WORD_LENGTH = new ParseField("min_word_length");
ParseField MAX_WORD_LENGTH = new ParseField("max_word_length");
ParseField STOP_WORDS = new ParseField("stop_words");
ParseField ANALYZER = new ParseField("analyzer");
ParseField MINIMUM_SHOULD_MATCH = new ParseField("minimum_should_match");
ParseField BOOST_TERMS = new ParseField("boost_terms");
ParseField INCLUDE = new ParseField("include");
ParseField FAIL_ON_UNSUPPORTED_FIELD = new ParseField("fail_on_unsupported_field");
}
private static final ParseField FIELDS = new ParseField("fields");
private static final ParseField LIKE = new ParseField("like");
private static final ParseField UNLIKE = new ParseField("unlike");
private static final ParseField MAX_QUERY_TERMS = new ParseField("max_query_terms");
private static final ParseField MIN_TERM_FREQ = new ParseField("min_term_freq");
private static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq");
private static final ParseField MAX_DOC_FREQ = new ParseField("max_doc_freq");
private static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length");
private static final ParseField MAX_WORD_LENGTH = new ParseField("max_word_length");
private static final ParseField STOP_WORDS = new ParseField("stop_words");
private static final ParseField ANALYZER = new ParseField("analyzer");
private static final ParseField MINIMUM_SHOULD_MATCH = new ParseField("minimum_should_match");
private static final ParseField BOOST_TERMS = new ParseField("boost_terms");
private static final ParseField INCLUDE = new ParseField("include");
private static final ParseField FAIL_ON_UNSUPPORTED_FIELD = new ParseField("fail_on_unsupported_field");
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
public static final ParseField DOC = new ParseField("doc");
private static final ParseField PER_FIELD_ANALYZER = new ParseField("per_field_analyzer");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
// document inputs
private final String[] fields;
@ -141,18 +149,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
public static final class Item implements ToXContentObject, Writeable {
public static final Item[] EMPTY_ARRAY = new Item[0];
public interface Field {
ParseField INDEX = new ParseField("_index");
ParseField TYPE = new ParseField("_type");
ParseField ID = new ParseField("_id");
ParseField DOC = new ParseField("doc");
ParseField FIELDS = new ParseField("fields");
ParseField PER_FIELD_ANALYZER = new ParseField("per_field_analyzer");
ParseField ROUTING = new ParseField("_routing");
ParseField VERSION = new ParseField("_version");
ParseField VERSION_TYPE = new ParseField("_version_type");
}
private String index;
private String type;
private String id;
@ -370,16 +366,16 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (currentFieldName != null) {
if (Field.INDEX.match(currentFieldName)) {
if (INDEX.match(currentFieldName)) {
item.index = parser.text();
} else if (Field.TYPE.match(currentFieldName)) {
} else if (TYPE.match(currentFieldName)) {
item.type = parser.text();
} else if (Field.ID.match(currentFieldName)) {
} else if (ID.match(currentFieldName)) {
item.id = parser.text();
} else if (Field.DOC.match(currentFieldName)) {
} else if (DOC.match(currentFieldName)) {
item.doc = jsonBuilder().copyCurrentStructure(parser).bytes();
item.xContentType = XContentType.JSON;
} else if (Field.FIELDS.match(currentFieldName)) {
} else if (FIELDS.match(currentFieldName)) {
if (token == XContentParser.Token.START_ARRAY) {
List<String> fields = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
@ -390,14 +386,13 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
throw new ElasticsearchParseException(
"failed to parse More Like This item. field [fields] must be an array");
}
} else if (Field.PER_FIELD_ANALYZER.match(currentFieldName)) {
} else if (PER_FIELD_ANALYZER.match(currentFieldName)) {
item.perFieldAnalyzer(TermVectorsRequest.readPerFieldAnalyzer(parser.map()));
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
} else if (ROUTING.match(currentFieldName)) {
item.routing = parser.text();
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
} else if (VERSION.match(currentFieldName)) {
item.version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName)
|| "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
} else if (VERSION_TYPE.match(currentFieldName)) {
item.versionType = VersionType.fromString(parser.text());
} else {
throw new ElasticsearchParseException(
@ -420,31 +415,31 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (this.index != null) {
builder.field(Field.INDEX.getPreferredName(), this.index);
builder.field(INDEX.getPreferredName(), this.index);
}
if (this.type != null) {
builder.field(Field.TYPE.getPreferredName(), this.type);
builder.field(TYPE.getPreferredName(), this.type);
}
if (this.id != null) {
builder.field(Field.ID.getPreferredName(), this.id);
builder.field(ID.getPreferredName(), this.id);
}
if (this.doc != null) {
builder.rawField(Field.DOC.getPreferredName(), this.doc, xContentType);
builder.rawField(DOC.getPreferredName(), this.doc, xContentType);
}
if (this.fields != null) {
builder.array(Field.FIELDS.getPreferredName(), this.fields);
builder.array(FIELDS.getPreferredName(), this.fields);
}
if (this.perFieldAnalyzer != null) {
builder.field(Field.PER_FIELD_ANALYZER.getPreferredName(), this.perFieldAnalyzer);
builder.field(PER_FIELD_ANALYZER.getPreferredName(), this.perFieldAnalyzer);
}
if (this.routing != null) {
builder.field(Field.ROUTING.getPreferredName(), this.routing);
builder.field(ROUTING.getPreferredName(), this.routing);
}
if (this.version != Versions.MATCH_ANY) {
builder.field(Field.VERSION.getPreferredName(), this.version);
builder.field(VERSION.getPreferredName(), this.version);
}
if (this.versionType != VersionType.INTERNAL) {
builder.field(Field.VERSION_TYPE.getPreferredName(), this.versionType.toString().toLowerCase(Locale.ROOT));
builder.field(VERSION_TYPE.getPreferredName(), this.versionType.toString().toLowerCase(Locale.ROOT));
}
return builder.endObject();
}
@ -781,26 +776,26 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
if (fields != null) {
builder.array(Field.FIELDS.getPreferredName(), fields);
builder.array(FIELDS.getPreferredName(), fields);
}
buildLikeField(builder, Field.LIKE.getPreferredName(), likeTexts, likeItems);
buildLikeField(builder, Field.UNLIKE.getPreferredName(), unlikeTexts, unlikeItems);
builder.field(Field.MAX_QUERY_TERMS.getPreferredName(), maxQueryTerms);
builder.field(Field.MIN_TERM_FREQ.getPreferredName(), minTermFreq);
builder.field(Field.MIN_DOC_FREQ.getPreferredName(), minDocFreq);
builder.field(Field.MAX_DOC_FREQ.getPreferredName(), maxDocFreq);
builder.field(Field.MIN_WORD_LENGTH.getPreferredName(), minWordLength);
builder.field(Field.MAX_WORD_LENGTH.getPreferredName(), maxWordLength);
buildLikeField(builder, LIKE.getPreferredName(), likeTexts, likeItems);
buildLikeField(builder, UNLIKE.getPreferredName(), unlikeTexts, unlikeItems);
builder.field(MAX_QUERY_TERMS.getPreferredName(), maxQueryTerms);
builder.field(MIN_TERM_FREQ.getPreferredName(), minTermFreq);
builder.field(MIN_DOC_FREQ.getPreferredName(), minDocFreq);
builder.field(MAX_DOC_FREQ.getPreferredName(), maxDocFreq);
builder.field(MIN_WORD_LENGTH.getPreferredName(), minWordLength);
builder.field(MAX_WORD_LENGTH.getPreferredName(), maxWordLength);
if (stopWords != null) {
builder.array(Field.STOP_WORDS.getPreferredName(), stopWords);
builder.array(STOP_WORDS.getPreferredName(), stopWords);
}
if (analyzer != null) {
builder.field(Field.ANALYZER.getPreferredName(), analyzer);
builder.field(ANALYZER.getPreferredName(), analyzer);
}
builder.field(Field.MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch);
builder.field(Field.BOOST_TERMS.getPreferredName(), boostTerms);
builder.field(Field.INCLUDE.getPreferredName(), include);
builder.field(Field.FAIL_ON_UNSUPPORTED_FIELD.getPreferredName(), failOnUnsupportedField);
builder.field(MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch);
builder.field(BOOST_TERMS.getPreferredName(), boostTerms);
builder.field(INCLUDE.getPreferredName(), include);
builder.field(FAIL_ON_UNSUPPORTED_FIELD.getPreferredName(), failOnUnsupportedField);
printBoostAndQueryName(builder);
builder.endObject();
}
@ -839,31 +834,31 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (Field.LIKE.match(currentFieldName)) {
if (LIKE.match(currentFieldName)) {
parseLikeField(parser, likeTexts, likeItems);
} else if (Field.UNLIKE.match(currentFieldName)) {
} else if (UNLIKE.match(currentFieldName)) {
parseLikeField(parser, unlikeTexts, unlikeItems);
} else if (Field.MAX_QUERY_TERMS.match(currentFieldName)) {
} else if (MAX_QUERY_TERMS.match(currentFieldName)) {
maxQueryTerms = parser.intValue();
} else if (Field.MIN_TERM_FREQ.match(currentFieldName)) {
} else if (MIN_TERM_FREQ.match(currentFieldName)) {
minTermFreq =parser.intValue();
} else if (Field.MIN_DOC_FREQ.match(currentFieldName)) {
} else if (MIN_DOC_FREQ.match(currentFieldName)) {
minDocFreq = parser.intValue();
} else if (Field.MAX_DOC_FREQ.match(currentFieldName)) {
} else if (MAX_DOC_FREQ.match(currentFieldName)) {
maxDocFreq = parser.intValue();
} else if (Field.MIN_WORD_LENGTH.match(currentFieldName)) {
} else if (MIN_WORD_LENGTH.match(currentFieldName)) {
minWordLength = parser.intValue();
} else if (Field.MAX_WORD_LENGTH.match(currentFieldName)) {
} else if (MAX_WORD_LENGTH.match(currentFieldName)) {
maxWordLength = parser.intValue();
} else if (Field.ANALYZER.match(currentFieldName)) {
} else if (ANALYZER.match(currentFieldName)) {
analyzer = parser.text();
} else if (Field.MINIMUM_SHOULD_MATCH.match(currentFieldName)) {
} else if (MINIMUM_SHOULD_MATCH.match(currentFieldName)) {
minimumShouldMatch = parser.text();
} else if (Field.BOOST_TERMS.match(currentFieldName)) {
} else if (BOOST_TERMS.match(currentFieldName)) {
boostTerms = parser.floatValue();
} else if (Field.INCLUDE.match(currentFieldName)) {
} else if (INCLUDE.match(currentFieldName)) {
include = parser.booleanValue();
} else if (Field.FAIL_ON_UNSUPPORTED_FIELD.match(currentFieldName)) {
} else if (FAIL_ON_UNSUPPORTED_FIELD.match(currentFieldName)) {
failOnUnsupportedField = parser.booleanValue();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
@ -873,20 +868,20 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (Field.FIELDS.match(currentFieldName)) {
if (FIELDS.match(currentFieldName)) {
fields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
fields.add(parser.text());
}
} else if (Field.LIKE.match(currentFieldName)) {
} else if (LIKE.match(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseLikeField(parser, likeTexts, likeItems);
}
} else if (Field.UNLIKE.match(currentFieldName)) {
} else if (UNLIKE.match(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseLikeField(parser, unlikeTexts, unlikeItems);
}
} else if (Field.STOP_WORDS.match(currentFieldName)) {
} else if (STOP_WORDS.match(currentFieldName)) {
stopWords = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
stopWords.add(parser.text());
@ -895,9 +890,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (Field.LIKE.match(currentFieldName)) {
if (LIKE.match(currentFieldName)) {
parseLikeField(parser, likeTexts, likeItems);
} else if (Field.UNLIKE.match(currentFieldName)) {
} else if (UNLIKE.match(currentFieldName)) {
parseLikeField(parser, unlikeTexts, unlikeItems);
} else {
throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");


@ -292,7 +292,7 @@ public class BulkRequestTests extends ESTestCase {
builder.field("_index", "index");
builder.field("_type", "type");
builder.field("_id", "id");
builder.field("_version", 1L);
builder.field("version", 1L);
builder.endObject();
builder.endObject();
}
@ -301,7 +301,7 @@ public class BulkRequestTests extends ESTestCase {
builder.startObject();
builder.field("doc", "{}");
Map<String,Object> values = new HashMap<>();
values.put("_version", 2L);
values.put("version", 2L);
values.put("_index", "index");
values.put("_type", "type");
builder.field("upsert", values);


@ -67,7 +67,7 @@ import static org.hamcrest.Matchers.instanceOf;
public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> {
private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[]{Item.Field.DOC.getPreferredName()};
private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[]{MoreLikeThisQueryBuilder.DOC.getPreferredName()};
private static String[] randomFields;
private static Item[] randomLikeItems;
@ -222,7 +222,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
@Override
protected Set<String> getObjectsHoldingArbitraryContent() {
//doc contains arbitrary content, anything can be added to it and no exception will be thrown
return Collections.singleton(MoreLikeThisQueryBuilder.Item.Field.DOC.getPreferredName());
return Collections.singleton(MoreLikeThisQueryBuilder.DOC.getPreferredName());
}
@Override


@ -1,4 +1,4 @@
{ "update" : {"_id" : "1", "_retry_on_conflict" : 2} }
{ "update" : {"_id" : "1", "retry_on_conflict" : 2} }
{ "doc" : {"field" : "value"} }
{ "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1" } }
{ "script" : { "source" : "counter += param1", "lang" : "javascript", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}}


@ -201,16 +201,16 @@ chunks, as this will slow things down.
=== Versioning
Each bulk item can include the version value using the
`_version`/`version` field. It automatically follows the behavior of the
`version` field. It automatically follows the behavior of the
index / delete operation based on the `_version` mapping. It also
support the `version_type`/`_version_type` (see <<index-versioning, versioning>>)
support the `version_type` (see <<index-versioning, versioning>>)
[float]
[[bulk-routing]]
=== Routing
Each bulk item can include the routing value using the
`_routing`/`routing` field. It automatically follows the behavior of the
`routing` field. It automatically follows the behavior of the
index / delete operation based on the `_routing` mapping.
[float]
@ -234,7 +234,7 @@ Control when the changes made by this request are visible to search. See
[[bulk-update]]
=== Update
When using `update` action `_retry_on_conflict` can be used as field in
When using `update` action `retry_on_conflict` can be used as field in
the action itself (not in the extra payload line), to specify how many
times an update should be retried in the case of a version conflict.
@ -246,11 +246,11 @@ the options. Example with update actions:
[source,js]
--------------------------------------------------
POST _bulk
{ "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
{ "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "retry_on_conflict" : 3} }
{ "doc" : {"field" : "value"} }
{ "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
{ "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "retry_on_conflict" : 3} }
{ "script" : { "source": "ctx._source.counter += params.param1", "lang" : "painless", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}}
{ "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
{ "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "retry_on_conflict" : 3} }
{ "doc" : {"field" : "value"}, "doc_as_upsert" : true }
{ "update" : {"_id" : "3", "_type" : "type1", "_index" : "index1", "_source" : true} }
{ "doc" : {"field" : "value"} }


@ -230,7 +230,7 @@ GET /_mget?routing=key1
"_index" : "test",
"_type" : "type",
"_id" : "1",
"_routing" : "key2"
"routing" : "key2"
},
{
"_index" : "test",


@ -30,6 +30,8 @@ way to reindex old indices is to use the `reindex` API.
* <<breaking_70_mappings_changes>>
* <<breaking_70_search_changes>>
* <<breaking_70_plugins_changes>>
* <<breaking_70_api_changes>>
include::migrate_7_0/aggregations.asciidoc[]
include::migrate_7_0/cluster.asciidoc[]
@ -37,3 +39,5 @@ include::migrate_7_0/indices.asciidoc[]
include::migrate_7_0/mappings.asciidoc[]
include::migrate_7_0/search.asciidoc[]
include::migrate_7_0/plugins.asciidoc[]
include::migrate_7_0/api.asciidoc[]


@ -0,0 +1,24 @@
[[breaking_70_api_changes]]
=== Breaking changes in 7.0
==== Camel case and underscore parameters deprecated in 6.x have been removed
A number of duplicate parameters deprecated in 6.x have been removed from
Bulk request, Multi Get request, Term Vectors request, and More Like This Query
requests.
The following camel case parameters have been removed:
* `opType`
* `versionType`, `_versionType`
The following parameters starting with underscore have been removed:
* `_parent`
* `_retry_on_conflict`
* `_routing`
* `_version`
* `_version_type`
Instead of these removed parameters, use their non camel case equivalents without
starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`.
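
For client code that builds bulk action lines programmatically, the updated `BulkRequestTests` in this commit suggests the shape of the new metadata. Below is a hypothetical sketch (the wrapper class and method are illustrative only) using Elasticsearch's `XContentBuilder`: document metadata keeps its leading underscore, request parameters do not.

[source,java]
--------------------------------------------------
// Hypothetical client-side sketch: one bulk "update" action line built with
// XContentBuilder, mirroring the updated BulkRequestTests in this commit.
import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

class BulkActionLineSketch {
    static XContentBuilder updateActionLine() throws IOException {
        XContentBuilder action = jsonBuilder();
        action.startObject();
        action.startObject("update");
        action.field("_index", "index1");         // document metadata keeps its underscore
        action.field("_type", "type1");
        action.field("_id", "1");
        action.field("retry_on_conflict", 3);     // request parameters are underscore-free
        action.field("version", 1L);
        action.field("version_type", "external");
        action.endObject();
        action.endObject();
        return action;
    }
}
--------------------------------------------------

Serialized, this matches the action lines shown in the updated bulk documentation and REST tests in this commit.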


@ -0,0 +1,26 @@
---
"Deprecated parameters should fail in Bulk query":
- skip:
version: " - 6.99.99"
reason: some parameters are removed starting from 7.0, their equivalents without underscore are used instead
features: "warnings"
- do:
catch: bad_request
bulk:
body: |
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_version": 1 } }
{ "doc": { "f1": "v1" } }
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2", "_version": 1 } }
{ "doc": { "f1": "v2" } }
- do:
catch: bad_request
bulk:
body: |
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_routing": "test1" } }
{ "doc": { "f1": "v1" } }
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2", "_routing": "test1" } }
{ "doc": { "f1": "v2" } }


@ -0,0 +1,38 @@
---
"Deprecated parameters should fail in Multi Get query":
- skip:
version: " - 6.99.99"
reason: _version, _routing are removed starting from 7.0, their equivalents without underscore are used instead
features: "warnings"
- do:
index:
index: test_1
type: test
id: 1
body: { foo: bar }
- do:
index:
index: test_1
type: test
id: 2
body: { foo: baz }
- do:
catch: bad_request
mget:
body:
docs:
- { _index: test_1, _type: test, _id: 1, _routing : test1 }
- { _index: test_1, _type: test, _id: 2, _routing : test1 }
- do:
catch: bad_request
mget:
body:
docs:
- { _index: test_1, _type: test, _id: 1, _version : 1 }
- { _index: test_1, _type: test, _id: 2, _version : 1 }


@ -0,0 +1,52 @@
---
"Deprecated camel case and _ parameters should fail in Term Vectors query":
- skip:
version: " - 6.99.99"
reason: camel case and _ parameters (e.g. versionType, _version_type) should fail from 7.0
features: "warnings"
- do:
indices.create:
index: testidx
body:
mappings:
testtype:
properties:
text:
type : "text"
term_vector : "with_positions_offsets"
- do:
index:
index: testidx
type: testtype
id: testing_document
body: {"text" : "The quick brown fox is brown."}
- do:
catch: bad_request
mtermvectors:
"term_statistics" : true
"body" :
"docs":
-
"_index" : "testidx"
"_type" : "testtype"
"_id" : "testing_document"
"version" : 1
"versionType" : "external"
- do:
catch: bad_request
mtermvectors:
"term_statistics" : true
"body" :
"docs":
-
"_index" : "testidx"
"_type" : "testtype"
"_id" : "testing_document"
"version" : 1
"_version_type" : "external"