Using ObjectParser in UpdateRequest (#29293)

CRUD: Parsing changes for UpdateRequest (#29293)

Use `ObjectParser` to parse `UpdateRequest` so we reject unknown fields, and drop support
for the `_fields` parameter, which was deprecated in 5.x.
Authored by Ke Li on 2018-04-16 20:39:35 +08:00; committed by Nik Everett
parent a004a33803
commit 0bfb59dcf2
18 changed files with 144 additions and 274 deletions
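
In practice, the switch to `ObjectParser` means `UpdateRequest#fromXContent` now fails fast on anything it does not recognize, as the new `testUnknownFieldParsing` test further down asserts. A minimal sketch of what a caller sees, assuming the `XContent` parser factory methods available on master at the time of this change (the index, type, id and request body are placeholders, not part of the commit):

```java
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class StrictUpdateParsingSketch {
    public static void main(String[] args) throws Exception {
        // A request body that still carries the removed "fields" key.
        String body = "{\"doc\": {\"field1\": \"value1\"}, \"fields\": [\"field1\"]}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, body)) {
            // Previously the deprecated "fields" entry was parsed and unknown keys
            // were silently skipped; with ObjectParser this now throws.
            new UpdateRequest("test", "type", "1").fromXContent(parser);
        } catch (IllegalArgumentException e) {
            // e.g. "[UpdateRequest] unknown field [fields], parser not found"
            System.out.println(e.getMessage());
        }
    }
}
```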

View File

@ -27,7 +27,6 @@ import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.shard.ShardId;
@ -68,9 +67,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
String defaultIndex = request.param("index");
String defaultType = request.param("type");
String defaultRouting = request.param("routing");
String fieldsParam = request.param("fields");
String defaultPipeline = request.param("pipeline");
String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;
String waitForActiveShards = request.param("wait_for_active_shards");
if (waitForActiveShards != null) {
@ -78,7 +75,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
}
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.setRefreshPolicy(request.param("refresh"));
bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields,
bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting,
null, defaultPipeline, null, true, request.getXContentType());
// short circuit the call to the transport layer

View File

@ -2,7 +2,7 @@
=== Breaking API changes in 7.0
==== Camel case and underscore parameters deprecated in 6.x have been removed
A number of duplicate parameters deprecated in 6.x have been removed from
Bulk request, Multi Get request, Term Vectors request, and More Like This Query
requests.
@ -22,3 +22,7 @@ The following parameters starting with underscore have been removed:
Instead of these removed parameters, use their non-camel-case equivalents without
the leading underscore, e.g. use `version_type` instead of `_version_type` or `versionType`.
==== The parameter `fields` deprecated in 6.x has been removed from Bulk request and Update request
The Update API now returns `400 - Bad request` if the request contains unknown parameters,
where previous versions silently ignored them.
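
For Java clients the migration mirrors the test changes further down in this commit: `UpdateRequestBuilder#setFields` is gone, so callers request the relevant parts of `_source` instead and read values out of `GetResult#sourceAsMap()`. A hedged sketch of that migration (the `client`, index, type, id and `counter` field are placeholders, not part of this commit):

```java
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;

public class FieldsToFetchSourceSketch {
    // Sketch only: "client" is any connected Client and "script" an existing update script.
    static Object updatedCounter(Client client, Script script) {
        UpdateResponse response = client.prepareUpdate("test", "type1", "1")
                .setScript(script)
                // .setFields("counter")           // removed in 7.0
                .setFetchSource("counter", null)   // include "counter" from _source, no excludes
                .get();
        // Per-field values are no longer returned; read the filtered _source instead.
        return response.getGetResult().sourceAsMap().get("counter");
    }
}
```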

View File

@ -37,10 +37,6 @@
"type" : "string",
"description" : "Default document type for items which don't provide one"
},
"fields": {
"type": "list",
"description" : "Default comma-separated list of fields to return in the response for updates, can be overridden on each sub-request"
},
"_source": {
"type" : "list",
"description" : "True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub-request"

View File

@ -27,10 +27,6 @@
"type": "string",
"description": "Sets the number of shard copies that must be active before proceeding with the update operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)"
},
"fields": {
"type": "list",
"description": "A comma-separated list of fields to return in the response"
},
"_source": {
"type" : "list",
"description" : "True or false to return the _source field or not, or a list of fields to return"

View File

@ -299,7 +299,7 @@ public class BulkProcessor implements Closeable {
*/
public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
@Nullable String defaultPipeline, @Nullable Object payload, XContentType xContentType) throws Exception {
bulkRequest.add(data, defaultIndex, defaultType, null, null, null, defaultPipeline, payload, true, xContentType);
bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, payload, true, xContentType);
executeIfNeeded();
return this;
}

View File

@ -36,8 +36,6 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
@ -66,8 +64,6 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
* @see org.elasticsearch.client.Client#bulk(BulkRequest)
*/
public class BulkRequest extends ActionRequest implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(BulkRequest.class));
private static final int REQUEST_OVERHEAD = 50;
@ -80,7 +76,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
private static final ParseField VERSION_TYPE = new ParseField("version_type");
private static final ParseField RETRY_ON_CONFLICT = new ParseField("retry_on_conflict");
private static final ParseField PIPELINE = new ParseField("pipeline");
private static final ParseField FIELDS = new ParseField("fields");
private static final ParseField SOURCE = new ParseField("_source");
/**
@ -277,7 +272,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
*/
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
XContentType xContentType) throws IOException {
return add(data, defaultIndex, defaultType, null, null, null, null, null, true, xContentType);
return add(data, defaultIndex, defaultType, null, null, null, null, true, xContentType);
}
/**
@ -285,12 +280,13 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
*/
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex,
XContentType xContentType) throws IOException {
return add(data, defaultIndex, defaultType, null, null, null, null, null, allowExplicitIndex, xContentType);
return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex, xContentType);
}
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String
defaultRouting, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String
defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex, XContentType xContentType) throws IOException {
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
@Nullable String defaultRouting, @Nullable FetchSourceContext defaultFetchSourceContext,
@Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex,
XContentType xContentType) throws IOException {
XContent xContent = xContentType.xContent();
int line = 0;
int from = 0;
@ -333,7 +329,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
String id = null;
String routing = defaultRouting;
FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
String[] fields = defaultFields;
String opType = null;
long version = Versions.MATCH_ANY;
VersionType versionType = VersionType.INTERNAL;
@ -371,21 +366,14 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
retryOnConflict = parser.intValue();
} else if (PIPELINE.match(currentFieldName, parser.getDeprecationHandler())) {
pipeline = parser.text();
} else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
} else if (SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
fetchSourceContext = FetchSourceContext.fromXContent(parser);
} else {
throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
List<Object> values = parser.list();
fields = values.toArray(new String[values.size()]);
} else {
throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
}
throw new IllegalArgumentException("Malformed action/metadata line [" + line +
"], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
} else if (token == XContentParser.Token.START_OBJECT && SOURCE.match(currentFieldName, parser.getDeprecationHandler())) {
fetchSourceContext = FetchSourceContext.fromXContent(parser);
} else if (token != XContentParser.Token.VALUE_NULL) {
@ -435,10 +423,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
if (fetchSourceContext != null) {
updateRequest.fetchSource(fetchSourceContext);
}
if (fields != null) {
updateRequest.fields(fields);
}
IndexRequest upsertRequest = updateRequest.upsertRequest();
if (upsertRequest != null) {
upsertRequest.version(version);

View File

@ -291,8 +291,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), indexResponse.getVersion(),
indexResponse.getResult());
if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) ||
(updateRequest.fields() != null && updateRequest.fields().length > 0)) {
if (updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) {
final BytesReference indexSourceAsBytes = updateIndexRequest.source();
final Tuple<XContentType, Map<String, Object>> sourceAndContent =
XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType());

View File

@ -180,8 +180,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
bulkAction.execute(toSingleItemBulkRequest(upsertRequest), wrapBulkResponse(
ActionListener.<IndexResponse>wrap(response -> {
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult());
if ((request.fetchSource() != null && request.fetchSource().fetchSource()) ||
(request.fields() != null && request.fields().length > 0)) {
if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
Tuple<XContentType, Map<String, Object>> sourceAndContent =
XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType());
update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings;
@ -49,7 +48,7 @@ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.LongSupplier;
@ -292,61 +291,33 @@ public class UpdateHelper extends AbstractComponent {
/**
* Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response.
* For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in a update response
*/
public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version,
final Map<String, Object> source, XContentType sourceContentType,
@Nullable final BytesReference sourceAsBytes) {
if ((request.fields() == null || request.fields().length == 0) &&
(request.fetchSource() == null || request.fetchSource().fetchSource() == false)) {
if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) {
return null;
}
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(source);
boolean sourceRequested = false;
Map<String, DocumentField> fields = null;
if (request.fields() != null && request.fields().length > 0) {
for (String field : request.fields()) {
if (field.equals("_source")) {
sourceRequested = true;
continue;
}
Object value = sourceLookup.extractValue(field);
if (value != null) {
if (fields == null) {
fields = new HashMap<>(2);
}
DocumentField documentField = fields.get(field);
if (documentField == null) {
documentField = new DocumentField(field, new ArrayList<>(2));
fields.put(field, documentField);
}
documentField.getValues().add(value);
}
}
}
BytesReference sourceFilteredAsBytes = sourceAsBytes;
if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
sourceRequested = true;
if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
Object value = sourceLookup.filter(request.fetchSource());
try {
final int initialCapacity = Math.min(1024, sourceAsBytes.length());
BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
builder.value(value);
sourceFilteredAsBytes = BytesReference.bytes(builder);
}
} catch (IOException e) {
throw new ElasticsearchException("Error filtering source", e);
if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(source);
Object value = sourceLookup.filter(request.fetchSource());
try {
final int initialCapacity = Math.min(1024, sourceAsBytes.length());
BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
builder.value(value);
sourceFilteredAsBytes = BytesReference.bytes(builder);
}
} catch (IOException e) {
throw new ElasticsearchException("Error filtering source", e);
}
}
// TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
return new GetResult(concreteIndex, request.type(), request.id(), version, true,
sourceRequested ? sourceFilteredAsBytes : null, fields);
return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap());
}
public static class Result {

View File

@ -19,8 +19,6 @@
package org.elasticsearch.action.update;
import java.util.Arrays;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
@ -30,11 +28,14 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -48,15 +49,46 @@ import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.action.ValidateActions.addValidationError;
public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
implements DocWriteRequest<UpdateRequest>, WriteRequest<UpdateRequest>, ToXContentObject {
private static ObjectParser<UpdateRequest, Void> PARSER;
private static final ParseField SCRIPT_FIELD = new ParseField("script");
private static final ParseField SCRIPTED_UPSERT_FIELD = new ParseField("scripted_upsert");
private static final ParseField UPSERT_FIELD = new ParseField("upsert");
private static final ParseField DOC_FIELD = new ParseField("doc");
private static final ParseField DOC_AS_UPSERT_FIELD = new ParseField("doc_as_upsert");
private static final ParseField DETECT_NOOP_FIELD = new ParseField("detect_noop");
private static final ParseField SOURCE_FIELD = new ParseField("_source");
static {
PARSER = new ObjectParser<>(UpdateRequest.class.getSimpleName());
PARSER.declareField((request, script) -> request.script = script,
(parser, context) -> Script.parse(parser), SCRIPT_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING);
PARSER.declareBoolean(UpdateRequest::scriptedUpsert, SCRIPTED_UPSERT_FIELD);
PARSER.declareObject((request, builder) -> request.safeUpsertRequest().source(builder),
(parser, context) -> {
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.copyCurrentStructure(parser);
return builder;
}, UPSERT_FIELD);
PARSER.declareObject((request, builder) -> request.safeDoc().source(builder),
(parser, context) -> {
XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
docBuilder.copyCurrentStructure(parser);
return docBuilder;
}, DOC_FIELD);
PARSER.declareBoolean(UpdateRequest::docAsUpsert, DOC_AS_UPSERT_FIELD);
PARSER.declareBoolean(UpdateRequest::detectNoop, DETECT_NOOP_FIELD);
PARSER.declareField(UpdateRequest::fetchSource,
(parser, context) -> FetchSourceContext.fromXContent(parser), SOURCE_FIELD,
ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING);
}
private String type;
private String id;
@ -66,7 +98,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
@Nullable
Script script;
private String[] fields;
private FetchSourceContext fetchSourceContext;
private long version = Versions.MATCH_ANY;
@ -365,16 +396,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
return this;
}
/**
* Explicitly specify the fields that will be returned. By default, nothing is returned.
* @deprecated Use {@link UpdateRequest#fetchSource(String[], String[])} instead
*/
@Deprecated
public UpdateRequest fields(String... fields) {
this.fields = fields;
return this;
}
/**
* Indicate that _source should be returned with every hit, with an
* "include" and/or "exclude" set which can include simple wildcard
@ -389,7 +410,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
*/
public UpdateRequest fetchSource(@Nullable String include, @Nullable String exclude) {
FetchSourceContext context = this.fetchSourceContext == null ? FetchSourceContext.FETCH_SOURCE : this.fetchSourceContext;
this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), new String[] {include}, new String[]{exclude});
String[] includes = include == null ? Strings.EMPTY_ARRAY : new String[]{include};
String[] excludes = exclude == null ? Strings.EMPTY_ARRAY : new String[]{exclude};
this.fetchSourceContext = new FetchSourceContext(context.fetchSource(), includes, excludes);
return this;
}
@ -428,16 +451,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
return this;
}
/**
* Get the fields to be returned.
* @deprecated Use {@link UpdateRequest#fetchSource()} instead
*/
@Deprecated
public String[] fields() {
return fields;
}
/**
* Gets the {@link FetchSourceContext} which defines how the _source should
* be fetched.
@ -707,49 +720,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
}
public UpdateRequest fromXContent(XContentParser parser) throws IOException {
Script script = null;
XContentParser.Token token = parser.nextToken();
if (token == null) {
return this;
}
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if ("script".equals(currentFieldName)) {
script = Script.parse(parser);
} else if ("scripted_upsert".equals(currentFieldName)) {
scriptedUpsert = parser.booleanValue();
} else if ("upsert".equals(currentFieldName)) {
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.copyCurrentStructure(parser);
safeUpsertRequest().source(builder);
} else if ("doc".equals(currentFieldName)) {
XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
docBuilder.copyCurrentStructure(parser);
safeDoc().source(docBuilder);
} else if ("doc_as_upsert".equals(currentFieldName)) {
docAsUpsert(parser.booleanValue());
} else if ("detect_noop".equals(currentFieldName)) {
detectNoop(parser.booleanValue());
} else if ("fields".equals(currentFieldName)) {
List<Object> fields = null;
if (token == XContentParser.Token.START_ARRAY) {
fields = (List) parser.list();
} else if (token.isValue()) {
fields = Collections.singletonList(parser.text());
}
if (fields != null) {
fields(fields.toArray(new String[fields.size()]));
}
} else if ("_source".equals(currentFieldName)) {
fetchSourceContext = FetchSourceContext.fromXContent(parser);
}
}
if (script != null) {
this.script = script;
}
return this;
return PARSER.parse(parser, this, null);
}
public boolean docAsUpsert() {
@ -789,7 +760,12 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
doc = new IndexRequest();
doc.readFrom(in);
}
fields = in.readOptionalStringArray();
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
String[] fields = in.readOptionalStringArray();
if (fields != null) {
throw new IllegalArgumentException("[fields] is no longer supported");
}
}
fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
if (in.readBoolean()) {
upsertRequest = new IndexRequest();
@ -812,7 +788,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalString(null); // _parent
}
boolean hasScript = script != null;
out.writeBoolean(hasScript);
if (hasScript) {
@ -830,7 +806,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
doc.id(id);
doc.writeTo(out);
}
out.writeOptionalStringArray(fields);
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalStringArray(null);
}
out.writeOptionalWriteable(fetchSourceContext);
if (upsertRequest == null) {
out.writeBoolean(false);
@ -880,9 +858,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
if (detectNoop == false) {
builder.field("detect_noop", detectNoop);
}
if (fields != null) {
builder.array("fields", fields);
}
if (fetchSourceContext != null) {
builder.field("_source", fetchSourceContext);
}
@ -908,9 +883,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
}
res.append(", scripted_upsert[").append(scriptedUpsert).append("]");
res.append(", detect_noop[").append(detectNoop).append("]");
if (fields != null) {
res.append(", fields[").append(Arrays.toString(fields)).append("]");
}
return res.append("}").toString();
}
}

View File

@ -26,20 +26,15 @@ import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.rest.action.document.RestUpdateAction;
import org.elasticsearch.script.Script;
import java.util.Map;
public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<UpdateRequest, UpdateResponse, UpdateRequestBuilder>
implements WriteRequestBuilder<UpdateRequestBuilder> {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));
public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) {
super(client, action, new UpdateRequest());
@ -87,17 +82,6 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
return this;
}
/**
* Explicitly specify the fields that will be returned. By default, nothing is returned.
* @deprecated Use {@link UpdateRequestBuilder#setFetchSource(String[], String[])} instead
*/
@Deprecated
public UpdateRequestBuilder setFields(String... fields) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
request.fields(fields);
return this;
}
/**
* Indicate that _source should be returned with every hit, with an
* "include" and/or "exclude" set which can include simple wildcard

View File

@ -24,9 +24,6 @@ import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.rest.BaseRestHandler;
@ -50,8 +47,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT;
* </pre>
*/
public class RestBulkAction extends BaseRestHandler {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(RestBulkAction.class));
private final boolean allowExplicitIndex;
@ -80,11 +75,6 @@ public class RestBulkAction extends BaseRestHandler {
String defaultType = request.param("type", MapperService.SINGLE_MAPPING_NAME);
String defaultRouting = request.param("routing");
FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
String fieldsParam = request.param("fields");
if (fieldsParam != null) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
}
String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;
String defaultPipeline = request.param("pipeline");
String waitForActiveShards = request.param("wait_for_active_shards");
if (waitForActiveShards != null) {
@ -92,7 +82,7 @@ public class RestBulkAction extends BaseRestHandler {
}
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.setRefreshPolicy(request.param("refresh"));
bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields,
bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting,
defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex, request.getXContentType());
return channel -> client.bulk(bulkRequest, new RestStatusToXContentListener<>(channel));

View File

@ -23,9 +23,6 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.rest.BaseRestHandler;
@ -40,8 +37,6 @@ import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestUpdateAction extends BaseRestHandler {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));
public RestUpdateAction(Settings settings, RestController controller) {
super(settings);
@ -65,15 +60,7 @@ public class RestUpdateAction extends BaseRestHandler {
}
updateRequest.docAsUpsert(request.paramAsBoolean("doc_as_upsert", updateRequest.docAsUpsert()));
FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
String sField = request.param("fields");
if (sField != null && fetchSourceContext != null) {
throw new IllegalArgumentException("[fields] and [_source] cannot be used in the same request");
}
if (sField != null) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
String[] sFields = Strings.splitStringByCommaToArray(sField);
updateRequest.fields(sFields);
} else if (fetchSourceContext != null) {
if (fetchSourceContext != null) {
updateRequest.fetchSource(fetchSourceContext);
}

View File

@ -94,12 +94,12 @@ public class BulkRequestTests extends ESTestCase {
BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON);
assertThat(bulkRequest.numberOfActions(), equalTo(4));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
assertThat(bulkRequest.requests().get(0).id(), equalTo("1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1"));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1"));
assertThat(bulkRequest.requests().get(1).id(), equalTo("0"));
assertThat(bulkRequest.requests().get(1).type(), equalTo("type1"));
assertThat(bulkRequest.requests().get(1).index(), equalTo("index1"));
Script script = ((UpdateRequest) bulkRequest.requests().get(1)).script();
assertThat(script, notNullValue());
assertThat(script.getIdOrCode(), equalTo("counter += param1"));
@ -107,20 +107,18 @@ public class BulkRequestTests extends ESTestCase {
Map<String, Object> scriptParams = script.getParams();
assertThat(scriptParams, notNullValue());
assertThat(scriptParams.size(), equalTo(1));
assertThat(((Integer) scriptParams.get("param1")), equalTo(1));
assertThat(scriptParams.get("param1"), equalTo(1));
assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}"));
}
public void testBulkAllowExplicitIndex() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
try {
new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON);
fail();
} catch (Exception e) {
String bulkAction1 = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
Exception ex = expectThrows(Exception.class,
() -> new BulkRequest().add(
new BytesArray(bulkAction1.getBytes(StandardCharsets.UTF_8)), null, null, false, XContentType.JSON));
assertEquals("explicit index in bulk is not allowed", ex.getMessage());
}
bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk5.json");
new BulkRequest().add(new BytesArray(bulkAction.getBytes(StandardCharsets.UTF_8)), "test", null, false, XContentType.JSON);
}
@ -177,6 +175,16 @@ public class BulkRequestTests extends ESTestCase {
assertThat(bulkRequest.numberOfActions(), equalTo(9));
}
public void testBulkActionShouldNotContainArray() throws Exception {
String bulkAction = "{ \"index\":{\"_index\":[\"index1\", \"index2\"],\"_type\":\"type1\",\"_id\":\"1\"} }\r\n"
+ "{ \"field1\" : \"value1\" }\r\n";
BulkRequest bulkRequest = new BulkRequest();
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
() -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON));
assertEquals(exc.getMessage(), "Malformed action/metadata line [1]" +
", expected a simple value for field [_index] but found [START_ARRAY]");
}
public void testBulkEmptyObject() throws Exception {
String bulkIndexAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n";
String bulkIndexSource = "{ \"field1\" : \"value1\" }\r\n";
@ -299,7 +307,7 @@ public class BulkRequestTests extends ESTestCase {
out.write(xContentType.xContent().streamSeparator());
try(XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) {
builder.startObject();
builder.field("doc", "{}");
builder.startObject("doc").endObject();
Map<String,Object> values = new HashMap<>();
values.put("version", 2L);
values.put("_index", "index");

View File

@ -260,13 +260,13 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
assertThat(bulkResponse.getItems().length, equalTo(3));
bulkResponse = client().prepareBulk()
.add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFields("field")
.add(client().prepareUpdate().setIndex("test").setType("type1").setId("1").setFetchSource("field", null)
.setScript(new Script(
ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap())))
.add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFields("field")
.add(client().prepareUpdate().setIndex("test").setType("type1").setId("2").setFetchSource("field", null)
.setScript(new Script(
ScriptType.INLINE, CustomScriptPlugin.NAME, "ctx._source.field += 1", Collections.emptyMap())))
.add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFields("field")
.add(client().prepareUpdate().setIndex("test").setType("type1").setId("3").setFetchSource("field", null)
.setScript(new Script(
ScriptType.INLINE, CustomScriptPlugin.NAME, "throw script exception on unknown var", Collections.emptyMap())))
.execute().actionGet();
@ -279,7 +279,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
assertThat(bulkResponse.getItems()[1].getResponse().getId(), equalTo("2"));
assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L));
assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue(), equalTo(2));
assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().sourceAsMap().get("field"), equalTo(2));
assertThat(bulkResponse.getItems()[1].getFailure(), nullValue());
assertThat(bulkResponse.getItems()[2].getFailure().getId(), equalTo("3"));
@ -303,7 +303,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
builder.add(
client().prepareUpdate()
.setIndex("test").setType("type1").setId(Integer.toString(i))
.setFields("counter")
.setFetchSource("counter", null)
.setScript(script)
.setUpsert(jsonBuilder().startObject().field("counter", 1).endObject()));
}
@ -319,7 +319,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(1L));
assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(1));
assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(1));
for (int j = 0; j < 5; j++) {
GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).execute()
@ -333,7 +333,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
builder = client().prepareBulk();
for (int i = 0; i < numDocs; i++) {
UpdateRequestBuilder updateBuilder = client().prepareUpdate().setIndex("test").setType("type1").setId(Integer.toString(i))
.setFields("counter");
.setFetchSource("counter", null);
if (i % 2 == 0) {
updateBuilder.setScript(script);
} else {
@ -357,7 +357,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE));
assertThat(response.getItems()[i].getResponse().getId(), equalTo(Integer.toString(i)));
assertThat(response.getItems()[i].getResponse().getVersion(), equalTo(2L));
assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().field("counter").getValue(), equalTo(2));
assertThat(((UpdateResponse) response.getItems()[i].getResponse()).getGetResult().sourceAsMap().get("counter"), equalTo(2));
}
builder = client().prepareBulk();

View File

@ -61,7 +61,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.script.MockScriptEngine.mockInlineScript;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@ -277,17 +276,26 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
}
// Related to issue 15338
public void testFieldsParsing() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type1", "1").fromXContent(
createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")));
assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(request.fields(), arrayContaining("_source"));
public void testUnknownFieldParsing() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type", "1");
XContentParser contentParser = createParser(XContentFactory.jsonBuilder()
.startObject()
.field("unknown_field", "test")
.endObject());
request = new UpdateRequest("test", "type2", "2").fromXContent(createParser(JsonXContent.jsonXContent,
new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")));
assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2"));
assertThat(request.fields(), arrayContaining("field1", "field2"));
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> request.fromXContent(contentParser));
assertEquals("[UpdateRequest] unknown field [unknown_field], parser not found", ex.getMessage());
UpdateRequest request2 = new UpdateRequest("test", "type", "1");
XContentParser unknownObject = createParser(XContentFactory.jsonBuilder()
.startObject()
.field("script", "ctx.op = ctx._source.views == params.count ? 'delete' : 'none'")
.startObject("params")
.field("count", 1)
.endObject()
.endObject());
ex = expectThrows(IllegalArgumentException.class, () -> request2.fromXContent(unknownObject));
assertEquals("[UpdateRequest] unknown field [params], parser not found", ex.getMessage());
}
public void testFetchSourceParsing() throws Exception {
@ -444,13 +452,6 @@ public class UpdateRequestTests extends ESTestCase {
BytesReference source = RandomObjects.randomSource(random(), xContentType);
updateRequest.upsert(new IndexRequest().source(source, xContentType));
}
if (randomBoolean()) {
String[] fields = new String[randomIntBetween(0, 5)];
for (int i = 0; i < fields.length; i++) {
fields[i] = randomAlphaOfLength(5);
}
updateRequest.fields(fields);
}
if (randomBoolean()) {
if (randomBoolean()) {
updateRequest.fetchSource(randomBoolean());
@ -487,10 +488,8 @@ public class UpdateRequestTests extends ESTestCase {
assertEquals(updateRequest.detectNoop(), parsedUpdateRequest.detectNoop());
assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert());
assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert());
assertEquals(updateRequest.script(), parsedUpdateRequest.script());
assertEquals(updateRequest.scriptedUpsert(), parsedUpdateRequest.scriptedUpsert());
assertArrayEquals(updateRequest.fields(), parsedUpdateRequest.fields());
assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource());
BytesReference finalBytes = toXContent(parsedUpdateRequest, xContentType, humanReadable);

View File

@ -225,7 +225,7 @@ public class UpdateIT extends ESIntegTestCase {
UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
.setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
.setDocAsUpsert(true)
.setFields("_source")
.setFetchSource(true)
.execute().actionGet();
assertThat(updateResponse.getIndex(), equalTo("test"));
assertThat(updateResponse.getGetResult(), notNullValue());
@ -241,7 +241,7 @@ public class UpdateIT extends ESIntegTestCase {
assertThrows(client().prepareUpdate(indexOrAlias(), "type1", "1")
.setDoc(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
.setDocAsUpsert(false)
.setFields("_source")
.setFetchSource(true)
.execute(), DocumentMissingException.class);
}
@ -264,7 +264,7 @@ public class UpdateIT extends ESIntegTestCase {
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
.setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
.setScript(new Script(ScriptType.INLINE, UPDATE_SCRIPTS, PUT_VALUES_SCRIPT, Collections.singletonMap("extra", "foo")))
.setFields("_source")
.setFetchSource(true)
.execute().actionGet();
assertThat(updateResponse.getIndex(), equalTo("test"));
@ -293,12 +293,9 @@ public class UpdateIT extends ESIntegTestCase {
ensureGreen();
Script fieldIncScript = new Script(ScriptType.INLINE, UPDATE_SCRIPTS, FIELD_INC_SCRIPT, Collections.singletonMap("field", "field"));
try {
client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet();
fail();
} catch (DocumentMissingException e) {
// all is well
}
DocumentMissingException ex = expectThrows(DocumentMissingException.class,
() -> client().prepareUpdate(indexOrAlias(), "type1", "1").setScript(fieldIncScript).execute().actionGet());
assertEquals("[type1][1]: document missing", ex.getMessage());
client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
@ -353,19 +350,6 @@ public class UpdateIT extends ESIntegTestCase {
assertThat(getResponse.isExists(), equalTo(false));
}
// check fields parameter
client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
.setScript(fieldIncScript)
.setFields("field")
.setFetchSource(true)
.execute().actionGet();
assertThat(updateResponse.getIndex(), equalTo("test"));
assertThat(updateResponse.getGetResult(), notNullValue());
assertThat(updateResponse.getGetResult().getIndex(), equalTo("test"));
assertThat(updateResponse.getGetResult().sourceRef(), notNullValue());
assertThat(updateResponse.getGetResult().field("field").getValue(), notNullValue());
// check _source parameter
client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
@ -383,7 +367,7 @@ public class UpdateIT extends ESIntegTestCase {
// check updates without script
// add new field
client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field2", 2).endObject()).execute().actionGet();
for (int i = 0; i < 5; i++) {
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("1"));
@ -391,7 +375,7 @@ public class UpdateIT extends ESIntegTestCase {
}
// change existing field
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("field", 3).endObject()).execute().actionGet();
for (int i = 0; i < 5; i++) {
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo("3"));
@ -409,7 +393,7 @@ public class UpdateIT extends ESIntegTestCase {
testMap.put("map1", 8);
client().prepareIndex("test", "type1", "1").setSource("map", testMap).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
client().prepareUpdate(indexOrAlias(), "type1", "1").setDoc(XContentFactory.jsonBuilder().startObject().field("map", testMap3).endObject()).execute().actionGet();
for (int i = 0; i < 5; i++) {
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
Map map1 = (Map) getResponse.getSourceAsMap().get("map");
@ -581,7 +565,7 @@ public class UpdateIT extends ESIntegTestCase {
assertThat(response.getId(), equalTo(Integer.toString(i)));
assertThat(response.isExists(), equalTo(true));
assertThat(response.getVersion(), equalTo((long) numberOfThreads));
assertThat((Integer) response.getSource().get("field"), equalTo(numberOfThreads));
assertThat(response.getSource().get("field"), equalTo(numberOfThreads));
}
}

View File

@ -248,7 +248,7 @@ public class UpdateNoopIT extends ESIntegTestCase {
UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1")
.setDoc(xContentBuilder)
.setDocAsUpsert(true)
.setFields("_source");
.setFetchSource(true);
if (detectNoop != null) {
updateRequest.setDetectNoop(detectNoop);
}