Fixed naming inconsistency for fields/stored_fields in the APIs (#20166)

This change replaces the `fields` parameter with `stored_fields` where it makes sense,
following the renaming made in #18943 for the search API.

The following endpoints have been changed to use `stored_fields` instead of `fields` (see the Java sketch after this list):
* get
* mget
* explain
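
On the Java API side the rename shows up as `storedFields`/`setStoredFields` setters replacing the old `fields`/`setFields` ones. A minimal sketch, assuming an existing `Client` instance and placeholder index/type/id values:

```java
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

public class StoredFieldsUsage {
    // "index", "type" and "1" are placeholders; the Client is assumed to be built elsewhere.
    static void storedFieldsExamples(Client client) {
        // get: was setFields(...)
        GetResponse get = client.prepareGet("index", "type", "1")
                .setStoredFields("title", "count")
                .get();

        // mget: was Item.fields(...)
        MultiGetResponse mget = client.multiGet(new MultiGetRequest()
                .add(new MultiGetRequest.Item("index", "type", "1").storedFields("title")))
                .actionGet();

        // explain: was setFields(...)
        ExplainResponse explain = client.prepareExplain("index", "type", "1")
                .setQuery(QueryBuilders.matchAllQuery())
                .setStoredFields("title")
                .get();
    }
}
```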

The documentation and the REST API spec have been updated to reflect the changes for the following APIs:
* delete_by_query
* get
* mget
* explain

The `fields` parameter has been deprecated for the following APIs, where it is replaced by `_source` filtering (see the sketch after this list):
* update: the fields are extracted from the _source directly.
* bulk: the `fields` parameter is still accepted, but the values are extracted directly from the `_source`, so non-stored fields are allowed.
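
For the Java update API the replacement is the new `fetchSource` variants added by this change. A rough sketch under the same placeholder assumptions (the commented line shows the deprecated form):

```java
import java.util.Collections;

import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;

public class UpdateFetchSourceUsage {
    // Placeholder index/type/id; the Client is assumed to be built elsewhere.
    static UpdateResponse updateAndReturnCounter(Client client) {
        // Deprecated: .setFields("counter") -- values were extracted from _source anyway.
        return client.prepareUpdate("index", "type", "1")
                .setDoc(Collections.singletonMap("counter", (Object) 1))
                .setFetchSource("counter", null) // include pattern, exclude pattern
                .get();
    }
}
```

On the REST bulk endpoint the same move applies: instead of the deprecated `fields` entry, the action/metadata line can carry a `_source` entry, which is what the `BulkRequest` parsing changes below add.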

Some APIs still have the `fields` parameter for various reasons:
* cat.fielddata: the `fields` parameter relates to the fielddata fields that should be printed.
* indices.clear_cache: used to indicate which fielddata fields should be cleared.
* indices.get_field_mapping: used to filter fields in the mapping.
* indices.stats: get stats on fields (stored or not stored).
* termvectors: fields are retrieved from the stored fields if possible and extracted from the `_source` otherwise (see the sketch after this list).
* mtermvectors: same as termvectors, applied to multiple documents.
* nodes.stats: the fields parameter is used to concatenate completion_fields and fielddata_fields so it's not related to stored_fields at all.
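
By contrast, termvectors keeps its own field selection, unrelated to the stored_fields rename. A small sketch (placeholder identifiers) of building such a request with the Java API:

```java
import org.elasticsearch.action.termvectors.TermVectorsRequest;

public class TermVectorsFieldsUsage {
    // Placeholder index/type/id values.
    static TermVectorsRequest selectFields() {
        // selectedFields picks the fields to build term vectors for; values come from
        // stored fields when available and are re-extracted from _source otherwise.
        return new TermVectorsRequest("index", "type", "1")
                .selectedFields("title", "body");
    }
}
```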

Fixes #20155
Jim Ferenczi 2016-09-13 20:54:41 +02:00 committed by GitHub
parent fbe27664a6
commit 1764ec56b3
80 changed files with 921 additions and 499 deletions

View File

@ -72,7 +72,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
} }
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.setRefreshPolicy(request.param("refresh")); bulkRequest.setRefreshPolicy(request.param("refresh"));
bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, defaultPipeline, null, true); bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, defaultPipeline, null, true);
// short circuit the call to the transport layer // short circuit the call to the transport layer
BulkRestBuilderListener listener = new BulkRestBuilderListener(channel, request); BulkRestBuilderListener listener = new BulkRestBuilderListener(channel, request);

View File

@ -293,7 +293,7 @@ public class BulkProcessor implements Closeable {
} }
public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultPipeline, @Nullable Object payload) throws Exception { public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultPipeline, @Nullable Object payload) throws Exception {
bulkRequest.add(data, defaultIndex, defaultType, null, null, defaultPipeline, payload, true); bulkRequest.add(data, defaultIndex, defaultType, null, null, null, defaultPipeline, payload, true);
executeIfNeeded(); executeIfNeeded();
return this; return this;
} }

View File

@ -35,12 +35,15 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.VersionType; import org.elasticsearch.index.VersionType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -57,6 +60,8 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
* @see org.elasticsearch.client.Client#bulk(BulkRequest) * @see org.elasticsearch.client.Client#bulk(BulkRequest)
*/ */
public class BulkRequest extends ActionRequest<BulkRequest> implements CompositeIndicesRequest, WriteRequest<BulkRequest> { public class BulkRequest extends ActionRequest<BulkRequest> implements CompositeIndicesRequest, WriteRequest<BulkRequest> {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(BulkRequest.class));
private static final int REQUEST_OVERHEAD = 50; private static final int REQUEST_OVERHEAD = 50;
@ -257,17 +262,17 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
* Adds a framed data in binary format * Adds a framed data in binary format
*/ */
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception { public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType) throws Exception {
return add(data, defaultIndex, defaultType, null, null, null, null, true); return add(data, defaultIndex, defaultType, null, null, null, null, null, true);
} }
/** /**
* Adds a framed data in binary format * Adds a framed data in binary format
*/ */
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex) throws Exception { public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, boolean allowExplicitIndex) throws Exception {
return add(data, defaultIndex, defaultType, null, null, null, null, allowExplicitIndex); return add(data, defaultIndex, defaultType, null, null, null, null, null, allowExplicitIndex);
} }
public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex) throws Exception { public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType, @Nullable String defaultRouting, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSourceContext, @Nullable String defaultPipeline, @Nullable Object payload, boolean allowExplicitIndex) throws Exception {
XContent xContent = XContentFactory.xContent(data); XContent xContent = XContentFactory.xContent(data);
int line = 0; int line = 0;
int from = 0; int from = 0;
@ -301,6 +306,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
String id = null; String id = null;
String routing = defaultRouting; String routing = defaultRouting;
String parent = null; String parent = null;
FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
String[] fields = defaultFields; String[] fields = defaultFields;
String timestamp = null; String timestamp = null;
TimeValue ttl = null; TimeValue ttl = null;
@ -353,16 +359,21 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
pipeline = parser.text(); pipeline = parser.text();
} else if ("fields".equals(currentFieldName)) { } else if ("fields".equals(currentFieldName)) {
throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected"); throw new IllegalArgumentException("Action/metadata line [" + line + "] contains a simple value for parameter [fields] while a list is expected");
} else if ("_source".equals(currentFieldName)) {
fetchSourceContext = FetchSourceContext.parse(parser);
} else { } else {
throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]"); throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]");
} }
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if ("fields".equals(currentFieldName)) { if ("fields".equals(currentFieldName)) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
List<Object> values = parser.list(); List<Object> values = parser.list();
fields = values.toArray(new String[values.size()]); fields = values.toArray(new String[values.size()]);
} else { } else {
throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]"); throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
} }
} else if (token == XContentParser.Token.START_OBJECT && "_source".equals(currentFieldName)) {
fetchSourceContext = FetchSourceContext.parse(parser);
} else if (token != XContentParser.Token.VALUE_NULL) { } else if (token != XContentParser.Token.VALUE_NULL) {
throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]"); throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]");
} }
@ -402,7 +413,10 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
.version(version).versionType(versionType) .version(version).versionType(versionType)
.routing(routing) .routing(routing)
.parent(parent) .parent(parent)
.source(data.slice(from, nextMarker - from)); .fromXContent(data.slice(from, nextMarker - from));
if (fetchSourceContext != null) {
updateRequest.fetchSource(fetchSourceContext);
}
if (fields != null) { if (fields != null) {
updateRequest.fields(fields); updateRequest.fields(fields);
} }

View File

@ -251,7 +251,8 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
// add the response // add the response
IndexResponse indexResponse = result.getResponse(); IndexResponse indexResponse = result.getResponse();
UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.getResult()); UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.getResult());
if (updateRequest.fields() != null && updateRequest.fields().length > 0) { if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) ||
(updateRequest.fields() != null && updateRequest.fields().length > 0)) {
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true); Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true);
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
} }

View File

@ -40,7 +40,7 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
private String routing; private String routing;
private String preference; private String preference;
private QueryBuilder query; private QueryBuilder query;
private String[] fields; private String[] storedFields;
private FetchSourceContext fetchSourceContext; private FetchSourceContext fetchSourceContext;
private String[] filteringAlias = Strings.EMPTY_ARRAY; private String[] filteringAlias = Strings.EMPTY_ARRAY;
@ -122,12 +122,12 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
} }
public String[] fields() { public String[] storedFields() {
return fields; return storedFields;
} }
public ExplainRequest fields(String[] fields) { public ExplainRequest storedFields(String[] fields) {
this.fields = fields; this.storedFields = fields;
return this; return this;
} }
@ -167,8 +167,8 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
preference = in.readOptionalString(); preference = in.readOptionalString();
query = in.readNamedWriteable(QueryBuilder.class); query = in.readNamedWriteable(QueryBuilder.class);
filteringAlias = in.readStringArray(); filteringAlias = in.readStringArray();
fields = in.readOptionalStringArray(); storedFields = in.readOptionalStringArray();
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
nowInMillis = in.readVLong(); nowInMillis = in.readVLong();
} }
@ -181,8 +181,8 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
out.writeOptionalString(preference); out.writeOptionalString(preference);
out.writeNamedWriteable(query); out.writeNamedWriteable(query);
out.writeStringArray(filteringAlias); out.writeStringArray(filteringAlias);
out.writeOptionalStringArray(fields); out.writeOptionalStringArray(storedFields);
out.writeOptionalStreamable(fetchSourceContext); out.writeOptionalWriteable(fetchSourceContext);
out.writeVLong(nowInMillis); out.writeVLong(nowInMillis);
} }
} }

View File

@ -88,10 +88,10 @@ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<Ex
} }
/** /**
* Explicitly specify the fields that will be returned for the explained document. By default, nothing is returned. * Explicitly specify the stored fields that will be returned for the explained document. By default, nothing is returned.
*/ */
public ExplainRequestBuilder setFields(String... fields) { public ExplainRequestBuilder setStoredFields(String... fields) {
request.fields(fields); request.storedFields(fields);
return this; return this;
} }

View File

@ -106,12 +106,11 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
Rescorer rescorer = ctx.rescorer(); Rescorer rescorer = ctx.rescorer();
explanation = rescorer.explain(topLevelDocId, context, ctx, explanation); explanation = rescorer.explain(topLevelDocId, context, ctx, explanation);
} }
if (request.fields() != null || (request.fetchSourceContext() != null && request.fetchSourceContext().fetchSource())) { if (request.storedFields() != null || (request.fetchSourceContext() != null && request.fetchSourceContext().fetchSource())) {
// Advantage is that we're not opening a second searcher to retrieve the _source. Also // Advantage is that we're not opening a second searcher to retrieve the _source. Also
// because we are working in the same searcher in engineGetResult we can be sure that a // because we are working in the same searcher in engineGetResult we can be sure that a
// doc isn't deleted between the initial get and this call. // doc isn't deleted between the initial get and this call.
GetResult getResult = context.indexShard().getService().get(result, request.id(), request.type(), request.fields(), GetResult getResult = context.indexShard().getService().get(result, request.id(), request.type(), request.storedFields(), request.fetchSourceContext());
request.fetchSourceContext());
return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation, getResult); return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation, getResult);
} else { } else {
return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation); return new ExplainResponse(shardId.getIndexName(), request.type(), request.id(), true, explanation);

View File

@ -51,7 +51,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
private String parent; private String parent;
private String preference; private String preference;
private String[] fields; private String[] storedFields;
private FetchSourceContext fetchSourceContext; private FetchSourceContext fetchSourceContext;
@ -186,20 +186,20 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
} }
/** /**
* Explicitly specify the fields that will be returned. By default, the <tt>_source</tt> * Explicitly specify the stored fields that will be returned. By default, the <tt>_source</tt>
* field will be returned. * field will be returned.
*/ */
public GetRequest fields(String... fields) { public GetRequest storedFields(String... fields) {
this.fields = fields; this.storedFields = fields;
return this; return this;
} }
/** /**
* Explicitly specify the fields that will be returned. By default, the <tt>_source</tt> * Explicitly specify the stored fields that will be returned. By default, the <tt>_source</tt>
* field will be returned. * field will be returned.
*/ */
public String[] fields() { public String[] storedFields() {
return this.fields; return this.storedFields;
} }
/** /**
@ -260,18 +260,12 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
parent = in.readOptionalString(); parent = in.readOptionalString();
preference = in.readOptionalString(); preference = in.readOptionalString();
refresh = in.readBoolean(); refresh = in.readBoolean();
int size = in.readInt(); storedFields = in.readOptionalStringArray();
if (size >= 0) {
fields = new String[size];
for (int i = 0; i < size; i++) {
fields[i] = in.readString();
}
}
realtime = in.readBoolean(); realtime = in.readBoolean();
this.versionType = VersionType.fromValue(in.readByte()); this.versionType = VersionType.fromValue(in.readByte());
this.version = in.readLong(); this.version = in.readLong();
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
} }
@Override @Override
@ -284,18 +278,11 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
out.writeOptionalString(preference); out.writeOptionalString(preference);
out.writeBoolean(refresh); out.writeBoolean(refresh);
if (fields == null) { out.writeOptionalStringArray(storedFields);
out.writeInt(-1);
} else {
out.writeInt(fields.length);
for (String field : fields) {
out.writeString(field);
}
}
out.writeBoolean(realtime); out.writeBoolean(realtime);
out.writeByte(versionType.getValue()); out.writeByte(versionType.getValue());
out.writeLong(version); out.writeLong(version);
out.writeOptionalStreamable(fetchSourceContext); out.writeOptionalWriteable(fetchSourceContext);
} }
@Override @Override

View File

@ -88,8 +88,8 @@ public class GetRequestBuilder extends SingleShardOperationRequestBuilder<GetReq
* Explicitly specify the fields that will be returned. By default, the <tt>_source</tt> * Explicitly specify the fields that will be returned. By default, the <tt>_source</tt>
* field will be returned. * field will be returned.
*/ */
public GetRequestBuilder setFields(String... fields) { public GetRequestBuilder setStoredFields(String... fields) {
request.fields(fields); request.storedFields(fields);
return this; return this;
} }

View File

@ -134,14 +134,26 @@ public class GetResponse extends ActionResponse implements Iterable<GetField>, T
return getResult.getSource(); return getResult.getSource();
} }
/**
* @deprecated Use {@link GetResponse#getSource()} instead
*/
@Deprecated
public Map<String, GetField> getFields() { public Map<String, GetField> getFields() {
return getResult.getFields(); return getResult.getFields();
} }
/**
* @deprecated Use {@link GetResponse#getSource()} instead
*/
@Deprecated
public GetField getField(String name) { public GetField getField(String name) {
return getResult.field(name); return getResult.field(name);
} }
/**
* @deprecated Use {@link GetResponse#getSource()} instead
*/
@Deprecated
@Override @Override
public Iterator<GetField> iterator() { public Iterator<GetField> iterator() {
return getResult.iterator(); return getResult.iterator();

View File

@ -28,6 +28,7 @@ import org.elasticsearch.action.RealtimeRequest;
import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
@ -58,7 +59,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
private String id; private String id;
private String routing; private String routing;
private String parent; private String parent;
private String[] fields; private String[] storedFields;
private long version = Versions.MATCH_ANY; private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL; private VersionType versionType = VersionType.INTERNAL;
private FetchSourceContext fetchSourceContext; private FetchSourceContext fetchSourceContext;
@ -136,13 +137,13 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
return parent; return parent;
} }
public Item fields(String... fields) { public Item storedFields(String... fields) {
this.fields = fields; this.storedFields = fields;
return this; return this;
} }
public String[] fields() { public String[] storedFields() {
return this.fields; return this.storedFields;
} }
public long version() { public long version() {
@ -188,17 +189,11 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
id = in.readString(); id = in.readString();
routing = in.readOptionalString(); routing = in.readOptionalString();
parent = in.readOptionalString(); parent = in.readOptionalString();
int size = in.readVInt(); storedFields = in.readOptionalStringArray();
if (size > 0) {
fields = new String[size];
for (int i = 0; i < size; i++) {
fields[i] = in.readString();
}
}
version = in.readLong(); version = in.readLong();
versionType = VersionType.fromValue(in.readByte()); versionType = VersionType.fromValue(in.readByte());
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
} }
@Override @Override
@ -208,19 +203,11 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
out.writeString(id); out.writeString(id);
out.writeOptionalString(routing); out.writeOptionalString(routing);
out.writeOptionalString(parent); out.writeOptionalString(parent);
if (fields == null) { out.writeOptionalStringArray(storedFields);
out.writeVInt(0);
} else {
out.writeVInt(fields.length);
for (String field : fields) {
out.writeString(field);
}
}
out.writeLong(version); out.writeLong(version);
out.writeByte(versionType.getValue()); out.writeByte(versionType.getValue());
out.writeOptionalStreamable(fetchSourceContext); out.writeOptionalWriteable(fetchSourceContext);
} }
@Override @Override
@ -233,7 +220,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
if (version != item.version) return false; if (version != item.version) return false;
if (fetchSourceContext != null ? !fetchSourceContext.equals(item.fetchSourceContext) : item.fetchSourceContext != null) if (fetchSourceContext != null ? !fetchSourceContext.equals(item.fetchSourceContext) : item.fetchSourceContext != null)
return false; return false;
if (!Arrays.equals(fields, item.fields)) return false; if (!Arrays.equals(storedFields, item.storedFields)) return false;
if (!id.equals(item.id)) return false; if (!id.equals(item.id)) return false;
if (!index.equals(item.index)) return false; if (!index.equals(item.index)) return false;
if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false; if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false;
@ -251,7 +238,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
result = 31 * result + id.hashCode(); result = 31 * result + id.hashCode();
result = 31 * result + (routing != null ? routing.hashCode() : 0); result = 31 * result + (routing != null ? routing.hashCode() : 0);
result = 31 * result + (parent != null ? parent.hashCode() : 0); result = 31 * result + (parent != null ? parent.hashCode() : 0);
result = 31 * result + (fields != null ? Arrays.hashCode(fields) : 0); result = 31 * result + (storedFields != null ? Arrays.hashCode(storedFields) : 0);
result = 31 * result + Long.hashCode(version); result = 31 * result + Long.hashCode(version);
result = 31 * result + versionType.hashCode(); result = 31 * result + versionType.hashCode();
result = 31 * result + (fetchSourceContext != null ? fetchSourceContext.hashCode() : 0); result = 31 * result + (fetchSourceContext != null ? fetchSourceContext.hashCode() : 0);
@ -379,7 +366,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
String id = null; String id = null;
String routing = defaultRouting; String routing = defaultRouting;
String parent = null; String parent = null;
List<String> fields = null; List<String> storedFields = null;
long version = Versions.MATCH_ANY; long version = Versions.MATCH_ANY;
VersionType versionType = VersionType.INTERNAL; VersionType versionType = VersionType.INTERNAL;
@ -403,8 +390,11 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
} else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) {
parent = parser.text(); parent = parser.text();
} else if ("fields".equals(currentFieldName)) { } else if ("fields".equals(currentFieldName)) {
fields = new ArrayList<>(); throw new ParsingException(parser.getTokenLocation(),
fields.add(parser.text()); "Unsupported field [fields] used, expected [stored_fields] instead");
} else if ("stored_fields".equals(currentFieldName)) {
storedFields = new ArrayList<>();
storedFields.add(parser.text());
} else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) {
version = parser.longValue(); version = parser.longValue();
} else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) { } else if ("_version_type".equals(currentFieldName) || "_versionType".equals(currentFieldName) || "version_type".equals(currentFieldName) || "versionType".equals(currentFieldName)) {
@ -420,9 +410,12 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
} }
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if ("fields".equals(currentFieldName)) { if ("fields".equals(currentFieldName)) {
fields = new ArrayList<>(); throw new ParsingException(parser.getTokenLocation(),
"Unsupported field [fields] used, expected [stored_fields] instead");
} else if ("stored_fields".equals(currentFieldName)) {
storedFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
fields.add(parser.text()); storedFields.add(parser.text());
} }
} else if ("_source".equals(currentFieldName)) { } else if ("_source".equals(currentFieldName)) {
ArrayList<String> includes = new ArrayList<>(); ArrayList<String> includes = new ArrayList<>();
@ -464,12 +457,12 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
} }
} }
String[] aFields; String[] aFields;
if (fields != null) { if (storedFields != null) {
aFields = fields.toArray(new String[fields.size()]); aFields = storedFields.toArray(new String[storedFields.size()]);
} else { } else {
aFields = defaultFields; aFields = defaultFields;
} }
items.add(new Item(index, type, id).routing(routing).fields(aFields).parent(parent).version(version).versionType(versionType) items.add(new Item(index, type, id).routing(routing).storedFields(aFields).parent(parent).version(version).versionType(versionType)
.fetchSourceContext(fetchSourceContext == null ? defaultFetchSource : fetchSourceContext)); .fetchSourceContext(fetchSourceContext == null ? defaultFetchSource : fetchSourceContext));
} }
} }
@ -484,7 +477,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
if (!token.isValue()) { if (!token.isValue()) {
throw new IllegalArgumentException("ids array element should only contain ids"); throw new IllegalArgumentException("ids array element should only contain ids");
} }
items.add(new Item(defaultIndex, defaultType, parser.text()).fields(defaultFields).fetchSourceContext(defaultFetchSource).routing(defaultRouting)); items.add(new Item(defaultIndex, defaultType, parser.text()).storedFields(defaultFields).fetchSourceContext(defaultFetchSource).routing(defaultRouting));
} }
} }

View File

@ -92,7 +92,7 @@ public class TransportGetAction extends TransportSingleShardAction<GetRequest, G
indexShard.refresh("refresh_flag_get"); indexShard.refresh("refresh_flag_get");
} }
GetResult result = indexShard.getService().get(request.type(), request.id(), request.fields(), GetResult result = indexShard.getService().get(request.type(), request.id(), request.storedFields(),
request.realtime(), request.version(), request.versionType(), request.fetchSourceContext()); request.realtime(), request.version(), request.versionType(), request.fetchSourceContext());
return new GetResponse(result); return new GetResponse(result);
} }

View File

@ -88,7 +88,7 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction<Mul
for (int i = 0; i < request.locations.size(); i++) { for (int i = 0; i < request.locations.size(); i++) {
MultiGetRequest.Item item = request.items.get(i); MultiGetRequest.Item item = request.items.get(i);
try { try {
GetResult getResult = indexShard.getService().get(item.type(), item.id(), item.fields(), request.realtime(), item.version(), GetResult getResult = indexShard.getService().get(item.type(), item.id(), item.storedFields(), request.realtime(), item.version(),
item.versionType(), item.fetchSourceContext()); item.versionType(), item.fetchSourceContext());
response.add(request.locations.get(i), new GetResponse(getResult)); response.add(request.locations.get(i), new GetResponse(getResult));
} catch (Exception e) { } catch (Exception e) {

View File

@ -180,7 +180,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
super(item.index()); super(item.index());
this.id = item.id(); this.id = item.id();
this.type = item.type(); this.type = item.type();
this.selectedFields(item.fields()); this.selectedFields(item.storedFields());
this.routing(item.routing()); this.routing(item.routing());
this.parent(item.parent()); this.parent(item.parent());
} }

View File

@ -186,7 +186,8 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
@Override @Override
public void onResponse(IndexResponse response) { public void onResponse(IndexResponse response) {
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult()); UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getResult());
if (request.fields() != null && request.fields().length > 0) { if ((request.fetchSource() != null && request.fetchSource().fetchSource()) ||
(request.fields() != null && request.fields().length > 0)) {
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true); Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes)); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
} else { } else {

View File

@ -19,6 +19,7 @@
package org.elasticsearch.action.update; package org.elasticsearch.action.update;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
@ -28,9 +29,11 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType; import org.elasticsearch.index.VersionType;
@ -51,6 +54,7 @@ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
@ -267,17 +271,19 @@ public class UpdateHelper extends AbstractComponent {
} }
/** /**
* Extracts the fields from the updated document to be returned in an update response * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in an update response.
* For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in an update response
*/ */
public GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version, final Map<String, Object> source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) { public GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version, final Map<String, Object> source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) {
if (request.fields() == null || request.fields().length == 0) { if ((request.fields() == null || request.fields().length == 0) &&
(request.fetchSource() == null || request.fetchSource().fetchSource() == false)) {
return null; return null;
} }
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(source);
boolean sourceRequested = false; boolean sourceRequested = false;
Map<String, GetField> fields = null; Map<String, GetField> fields = null;
if (request.fields() != null && request.fields().length > 0) { if (request.fields() != null && request.fields().length > 0) {
SourceLookup sourceLookup = new SourceLookup();
sourceLookup.setSource(source);
for (String field : request.fields()) { for (String field : request.fields()) {
if (field.equals("_source")) { if (field.equals("_source")) {
sourceRequested = true; sourceRequested = true;
@ -298,8 +304,26 @@ public class UpdateHelper extends AbstractComponent {
} }
} }
BytesReference sourceFilteredAsBytes = sourceAsBytes;
if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
sourceRequested = true;
if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
Object value = sourceLookup.filter(request.fetchSource().includes(), request.fetchSource().excludes());
try {
final int initialCapacity = Math.min(1024, sourceAsBytes.length());
BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
builder.value(value);
sourceFilteredAsBytes = builder.bytes();
}
} catch (IOException e) {
throw new ElasticsearchException("Error filtering source", e);
}
}
}
// TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType) // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType)
return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceRequested ? sourceAsBytes : null, fields); return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceRequested ? sourceFilteredAsBytes : null, fields);
} }
public static class Result { public static class Result {

View File

@ -32,6 +32,8 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -42,6 +44,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
@ -55,6 +58,8 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
*/ */
public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
implements DocumentRequest<UpdateRequest>, WriteRequest<UpdateRequest> { implements DocumentRequest<UpdateRequest>, WriteRequest<UpdateRequest> {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(UpdateRequest.class));
private String type; private String type;
private String id; private String id;
@ -68,6 +73,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
Script script; Script script;
private String[] fields; private String[] fields;
private FetchSourceContext fetchSourceContext;
private long version = Versions.MATCH_ANY; private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL; private VersionType versionType = VersionType.INTERNAL;
@ -373,17 +379,80 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
/** /**
* Explicitly specify the fields that will be returned. By default, nothing is returned. * Explicitly specify the fields that will be returned. By default, nothing is returned.
* @deprecated Use {@link UpdateRequest#fetchSource(String[], String[])} instead
*/ */
@Deprecated
public UpdateRequest fields(String... fields) { public UpdateRequest fields(String... fields) {
this.fields = fields; this.fields = fields;
return this; return this;
} }
/** /**
* Get the fields to be returned. * Indicate that _source should be returned with every hit, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param include
* An optional include (optionally wildcarded) pattern to filter
* the returned _source
* @param exclude
* An optional exclude (optionally wildcarded) pattern to filter
* the returned _source
*/ */
public UpdateRequest fetchSource(@Nullable String include, @Nullable String exclude) {
this.fetchSourceContext = new FetchSourceContext(include, exclude);
return this;
}
/**
* Indicate that _source should be returned, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param includes
* An optional list of include (optionally wildcarded) pattern to
* filter the returned _source
* @param excludes
* An optional list of exclude (optionally wildcarded) pattern to
* filter the returned _source
*/
public UpdateRequest fetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
this.fetchSourceContext = new FetchSourceContext(includes, excludes);
return this;
}
/**
* Indicates whether the response should contain the updated _source.
*/
public UpdateRequest fetchSource(boolean fetchSource) {
this.fetchSourceContext = new FetchSourceContext(fetchSource);
return this;
}
/**
* Explicitly set the fetch source context for this request
*/
public UpdateRequest fetchSource(FetchSourceContext context) {
this.fetchSourceContext = context;
return this;
}
/**
* Get the fields to be returned.
* @deprecated Use {@link UpdateRequest#fetchSource()} instead
*/
@Deprecated
public String[] fields() { public String[] fields() {
return this.fields; return fields;
}
/**
* Gets the {@link FetchSourceContext} which defines how the _source should
* be fetched.
*/
public FetchSourceContext fetchSource() {
return fetchSourceContext;
} }
/** /**
@ -618,16 +687,16 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
return upsertRequest; return upsertRequest;
} }
public UpdateRequest source(XContentBuilder source) throws Exception { public UpdateRequest fromXContent(XContentBuilder source) throws Exception {
return source(source.bytes()); return fromXContent(source.bytes());
} }
public UpdateRequest source(byte[] source) throws Exception { public UpdateRequest fromXContent(byte[] source) throws Exception {
return source(source, 0, source.length); return fromXContent(source, 0, source.length);
} }
public UpdateRequest source(byte[] source, int offset, int length) throws Exception { public UpdateRequest fromXContent(byte[] source, int offset, int length) throws Exception {
return source(new BytesArray(source, offset, length)); return fromXContent(new BytesArray(source, offset, length));
} }
/** /**
@ -646,7 +715,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
return detectNoop; return detectNoop;
} }
public UpdateRequest source(BytesReference source) throws Exception { public UpdateRequest fromXContent(BytesReference source) throws Exception {
Script script = null; Script script = null;
try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) { try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
XContentParser.Token token = parser.nextToken(); XContentParser.Token token = parser.nextToken();
@ -685,6 +754,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
if (fields != null) { if (fields != null) {
fields(fields.toArray(new String[fields.size()])); fields(fields.toArray(new String[fields.size()]));
} }
} else if ("_source".equals(currentFieldName)) {
fetchSourceContext = FetchSourceContext.parse(parser);
} }
} }
if (script != null) { if (script != null) {
@ -729,13 +800,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
doc = new IndexRequest(); doc = new IndexRequest();
doc.readFrom(in); doc.readFrom(in);
} }
int size = in.readInt(); fields = in.readOptionalStringArray();
if (size >= 0) { fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
fields = new String[size];
for (int i = 0; i < size; i++) {
fields[i] = in.readString();
}
}
if (in.readBoolean()) { if (in.readBoolean()) {
upsertRequest = new IndexRequest(); upsertRequest = new IndexRequest();
upsertRequest.readFrom(in); upsertRequest.readFrom(in);
@ -772,14 +838,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
doc.id(id); doc.id(id);
doc.writeTo(out); doc.writeTo(out);
} }
if (fields == null) { out.writeOptionalStringArray(fields);
out.writeInt(-1); out.writeOptionalWriteable(fetchSourceContext);
} else {
out.writeInt(fields.length);
for (String field : fields) {
out.writeString(field);
}
}
if (upsertRequest == null) { if (upsertRequest == null) {
out.writeBoolean(false); out.writeBoolean(false);
} else { } else {

View File

@ -25,17 +25,22 @@ import org.elasticsearch.action.support.WriteRequestBuilder;
import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.support.replication.ReplicationRequest;
import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType; import org.elasticsearch.index.VersionType;
import org.elasticsearch.rest.action.document.RestUpdateAction;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import java.util.Map; import java.util.Map;
public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<UpdateRequest, UpdateResponse, UpdateRequestBuilder> public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<UpdateRequest, UpdateResponse, UpdateRequestBuilder>
implements WriteRequestBuilder<UpdateRequestBuilder> { implements WriteRequestBuilder<UpdateRequestBuilder> {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));
public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) { public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) {
super(client, action, new UpdateRequest()); super(client, action, new UpdateRequest());
@ -90,12 +95,57 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
/** /**
* Explicitly specify the fields that will be returned. By default, nothing is returned. * Explicitly specify the fields that will be returned. By default, nothing is returned.
* @deprecated Use {@link UpdateRequestBuilder#setFetchSource(String[], String[])} instead
*/ */
@Deprecated
public UpdateRequestBuilder setFields(String... fields) { public UpdateRequestBuilder setFields(String... fields) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
request.fields(fields); request.fields(fields);
return this; return this;
} }
/**
* Indicate that _source should be returned with every hit, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param include
* An optional include (optionally wildcarded) pattern to filter
* the returned _source
* @param exclude
* An optional exclude (optionally wildcarded) pattern to filter
* the returned _source
*/
public UpdateRequestBuilder setFetchSource(@Nullable String include, @Nullable String exclude) {
request.fetchSource(include, exclude);
return this;
}
/**
* Indicate that _source should be returned, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param includes
* An optional list of include (optionally wildcarded) pattern to
* filter the returned _source
* @param excludes
* An optional list of exclude (optionally wildcarded) pattern to
* filter the returned _source
*/
public UpdateRequestBuilder setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
request.fetchSource(includes, excludes);
return this;
}
/**
* Indicates whether the response should contain the updated _source.
*/
public UpdateRequestBuilder setFetchSource(boolean fetchSource) {
request.fetchSource(fetchSource);
return this;
}
/** /**
* Sets the number of retries of a version conflict occurs because the document was updated between * Sets the number of retries of a version conflict occurs because the document was updated between
* getting it and updating it. Defaults to 0. * getting it and updating it. Defaults to 0.
@ -279,26 +329,6 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
return this; return this;
} }
public UpdateRequestBuilder setSource(XContentBuilder source) throws Exception {
request.source(source);
return this;
}
public UpdateRequestBuilder setSource(byte[] source) throws Exception {
request.source(source);
return this;
}
public UpdateRequestBuilder setSource(byte[] source, int offset, int length) throws Exception {
request.source(source, offset, length);
return this;
}
public UpdateRequestBuilder setSource(BytesReference source) throws Exception {
request.source(source);
return this;
}
/** /**
* Sets whether the specified doc parameter should be used as upsert document. * Sets whether the specified doc parameter should be used as upsert document.
*/ */

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
@ -229,7 +230,7 @@ public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
builder.field(Fields.FOUND, exists); builder.field(Fields.FOUND, exists);
if (source != null) { if (source != null) {
XContentHelper.writeRawField("_source", source, builder, params); XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params);
} }
if (!otherFields.isEmpty()) { if (!otherFields.isEmpty()) {

View File

@ -94,7 +94,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
ObjectParser.ValueType.OBJECT_ARRAY); ObjectParser.ValueType.OBJECT_ARRAY);
PARSER.declareField((p, i, c) -> { PARSER.declareField((p, i, c) -> {
try { try {
i.setFetchSourceContext(FetchSourceContext.parse(c)); i.setFetchSourceContext(FetchSourceContext.parse(c.parser()));
} catch (IOException e) { } catch (IOException e) {
throw new ParsingException(p.getTokenLocation(), "Could not parse inner _source definition", e); throw new ParsingException(p.getTokenLocation(), "Could not parse inner _source definition", e);
} }
@ -219,7 +219,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
scriptFields.add(new ScriptField(in)); scriptFields.add(new ScriptField(in));
} }
} }
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
if (in.readBoolean()) { if (in.readBoolean()) {
int size = in.readVInt(); int size = in.readVInt();
sorts = new ArrayList<>(size); sorts = new ArrayList<>(size);
@ -258,7 +258,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
scriptField.writeTo(out); scriptField.writeTo(out);
} }
} }
out.writeOptionalStreamable(fetchSourceContext); out.writeOptionalWriteable(fetchSourceContext);
boolean hasSorts = sorts != null; boolean hasSorts = sorts != null;
out.writeBoolean(hasSorts); out.writeBoolean(hasSorts);
if (hasSorts) { if (hasSorts) {

View File

@ -258,8 +258,12 @@ public class TermVectorsService {
for (Map.Entry<String, Collection<Object>> entry : values.entrySet()) { for (Map.Entry<String, Collection<Object>> entry : values.entrySet()) {
String field = entry.getKey(); String field = entry.getKey();
Analyzer analyzer = getAnalyzerAtField(indexShard, field, perFieldAnalyzer); Analyzer analyzer = getAnalyzerAtField(indexShard, field, perFieldAnalyzer);
for (Object text : entry.getValue()) { if (entry.getValue() instanceof List) {
index.addField(field, text.toString(), analyzer); for (Object text : entry.getValue()) {
index.addField(field, text.toString(), analyzer);
}
} else {
index.addField(field, entry.getValue().toString(), analyzer);
} }
} }
/* and read vectors from it */ /* and read vectors from it */

View File

@ -24,10 +24,12 @@ import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.client.Requests; import org.elasticsearch.client.Requests;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
@ -37,6 +39,7 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.rest.action.RestBuilderListener;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestRequest.Method.PUT; import static org.elasticsearch.rest.RestRequest.Method.PUT;
@ -52,6 +55,8 @@ import static org.elasticsearch.rest.RestStatus.OK;
* </pre> * </pre>
*/ */
public class RestBulkAction extends BaseRestHandler { public class RestBulkAction extends BaseRestHandler {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(RestBulkAction.class));
private final boolean allowExplicitIndex; private final boolean allowExplicitIndex;
@ -75,18 +80,21 @@ public class RestBulkAction extends BaseRestHandler {
String defaultIndex = request.param("index"); String defaultIndex = request.param("index");
String defaultType = request.param("type"); String defaultType = request.param("type");
String defaultRouting = request.param("routing"); String defaultRouting = request.param("routing");
FetchSourceContext defaultFetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
String fieldsParam = request.param("fields"); String fieldsParam = request.param("fields");
String defaultPipeline = request.param("pipeline"); if (fieldsParam != null) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
}
String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null; String[] defaultFields = fieldsParam != null ? Strings.commaDelimitedListToStringArray(fieldsParam) : null;
String defaultPipeline = request.param("pipeline");
String waitForActiveShards = request.param("wait_for_active_shards"); String waitForActiveShards = request.param("wait_for_active_shards");
if (waitForActiveShards != null) { if (waitForActiveShards != null) {
bulkRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); bulkRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards));
} }
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.setRefreshPolicy(request.param("refresh")); bulkRequest.setRefreshPolicy(request.param("refresh"));
bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, defaultPipeline, bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields,
null, allowExplicitIndex); defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex);
client.bulk(bulkRequest, new RestBuilderListener<BulkResponse>(channel) { client.bulk(bulkRequest, new RestBuilderListener<BulkResponse>(channel) {
@Override @Override


@ -58,12 +58,15 @@ public class RestGetAction extends BaseRestHandler {
getRequest.parent(request.param("parent")); getRequest.parent(request.param("parent"));
getRequest.preference(request.param("preference")); getRequest.preference(request.param("preference"));
getRequest.realtime(request.paramAsBoolean("realtime", getRequest.realtime())); getRequest.realtime(request.paramAsBoolean("realtime", getRequest.realtime()));
if (request.param("fields") != null) {
String sField = request.param("fields"); throw new IllegalArgumentException("The parameter [fields] is no longer supported, " +
"please use [stored_fields] to retrieve stored fields or [_source] to load the field from _source");
}
String sField = request.param("stored_fields");
if (sField != null) { if (sField != null) {
String[] sFields = Strings.splitStringByCommaToArray(sField); String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) { if (sFields != null) {
getRequest.fields(sFields); getRequest.storedFields(sFields);
} }
} }
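
A minimal sketch of the resulting get usage (the client variable, index, type, id and field names below are placeholders, assuming a client is already set up): stored fields are now requested through setStoredFields(), while non-stored values have to come back via _source filtering.

    GetResponse response = client.prepareGet("test", "type1", "1")
            .setStoredFields("field1")            // replaces setFields("field1")
            .setFetchSource("field2", null)       // non-stored values must come from _source
            .get();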


@ -91,7 +91,7 @@ public abstract class RestHeadAction extends BaseRestHandler {
getRequest.preference(request.param("preference")); getRequest.preference(request.param("preference"));
getRequest.realtime(request.paramAsBoolean("realtime", getRequest.realtime())); getRequest.realtime(request.paramAsBoolean("realtime", getRequest.realtime()));
// don't get any fields back... // don't get any fields back...
getRequest.fields(Strings.EMPTY_ARRAY); getRequest.storedFields(Strings.EMPTY_ARRAY);
// TODO we can also just return the document size as Content-Length // TODO we can also just return the document size as Content-Length
client.get(getRequest, new RestResponseListener<GetResponse>(channel) { client.get(getRequest, new RestResponseListener<GetResponse>(channel) {


@ -59,9 +59,12 @@ public class RestMultiGetAction extends BaseRestHandler {
multiGetRequest.refresh(request.paramAsBoolean("refresh", multiGetRequest.refresh())); multiGetRequest.refresh(request.paramAsBoolean("refresh", multiGetRequest.refresh()));
multiGetRequest.preference(request.param("preference")); multiGetRequest.preference(request.param("preference"));
multiGetRequest.realtime(request.paramAsBoolean("realtime", multiGetRequest.realtime())); multiGetRequest.realtime(request.paramAsBoolean("realtime", multiGetRequest.realtime()));
if (request.param("fields") != null) {
throw new IllegalArgumentException("The parameter [fields] is no longer supported, " +
"please use [stored_fields] to retrieve stored fields or _source filtering if the field is not stored");
}
String[] sFields = null; String[] sFields = null;
String sField = request.param("fields"); String sField = request.param("stored_fields");
if (sField != null) { if (sField != null) {
sFields = Strings.splitStringByCommaToArray(sField); sFields = Strings.splitStringByCommaToArray(sField);
} }
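
The same applies per item in multi-get; a sketch under the same assumptions (placeholder client, index and field names):

    MultiGetResponse response = client.prepareMultiGet()
            .add(new MultiGetRequest.Item("test", "type1", "1").storedFields("field"))
            .add(new MultiGetRequest.Item("test", "type1", "3").storedFields("field"))
            .get();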


@ -25,6 +25,8 @@ import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.VersionType; import org.elasticsearch.index.VersionType;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
@ -33,12 +35,15 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestStatusToXContentListener; import org.elasticsearch.rest.action.RestStatusToXContentListener;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.POST;
/** /**
*/ */
public class RestUpdateAction extends BaseRestHandler { public class RestUpdateAction extends BaseRestHandler {
private static final DeprecationLogger DEPRECATION_LOGGER =
new DeprecationLogger(Loggers.getLogger(RestUpdateAction.class));
@Inject @Inject
public RestUpdateAction(Settings settings, RestController controller) { public RestUpdateAction(Settings settings, RestController controller) {
@ -58,13 +63,19 @@ public class RestUpdateAction extends BaseRestHandler {
updateRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards)); updateRequest.waitForActiveShards(ActiveShardCount.parseString(waitForActiveShards));
} }
updateRequest.docAsUpsert(request.paramAsBoolean("doc_as_upsert", updateRequest.docAsUpsert())); updateRequest.docAsUpsert(request.paramAsBoolean("doc_as_upsert", updateRequest.docAsUpsert()));
FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
String sField = request.param("fields"); String sField = request.param("fields");
if (sField != null) { if (sField != null && fetchSourceContext != null) {
String[] sFields = Strings.splitStringByCommaToArray(sField); throw new IllegalArgumentException("[fields] and [_source] cannot be used in the same request");
if (sFields != null) {
updateRequest.fields(sFields);
}
} }
if (sField != null) {
DEPRECATION_LOGGER.deprecated("Deprecated field [fields] used, expected [_source] instead");
String[] sFields = Strings.splitStringByCommaToArray(sField);
updateRequest.fields(sFields);
} else if (fetchSourceContext != null) {
updateRequest.fetchSource(fetchSourceContext);
}
updateRequest.retryOnConflict(request.paramAsInt("retry_on_conflict", updateRequest.retryOnConflict())); updateRequest.retryOnConflict(request.paramAsInt("retry_on_conflict", updateRequest.retryOnConflict()));
updateRequest.version(RestActions.parseVersion(request)); updateRequest.version(RestActions.parseVersion(request));
updateRequest.versionType(VersionType.fromString(request.param("version_type"), updateRequest.versionType())); updateRequest.versionType(VersionType.fromString(request.param("version_type"), updateRequest.versionType()));
@ -72,7 +83,7 @@ public class RestUpdateAction extends BaseRestHandler {
// see if we have it in the body // see if we have it in the body
if (request.hasContent()) { if (request.hasContent()) {
updateRequest.source(request.content()); updateRequest.fromXContent(request.content());
IndexRequest upsertRequest = updateRequest.upsertRequest(); IndexRequest upsertRequest = updateRequest.upsertRequest();
if (upsertRequest != null) { if (upsertRequest != null) {
upsertRequest.routing(request.param("routing")); upsertRequest.routing(request.param("routing"));
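
For update, source filtering replaces the deprecated fields parameter; a rough, test-style sketch (placeholder index, type, id and field names; combining doc with _source in one body is an assumption, though both keys are parsed independently):

    UpdateRequest request = new UpdateRequest("test", "type1", "1");
    request.fromXContent(XContentFactory.jsonBuilder().startObject()
            .startObject("doc").field("field1", "value1").endObject()
            .startObject("_source")
                .field("includes", "path.inner.*")
                .field("excludes", "another.inner.*")
            .endObject()
            .endObject());
    // request.fetchSource() now reports fetchSource()=true with the include/exclude patterns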


@ -78,11 +78,15 @@ public class RestExplainAction extends BaseRestHandler {
explainRequest.query(query); explainRequest.query(query);
} }
String sField = request.param("fields"); if (request.param("fields") != null) {
throw new IllegalArgumentException("The parameter [fields] is no longer supported, " +
"please use [stored_fields] to retrieve stored fields");
}
String sField = request.param("stored_fields");
if (sField != null) { if (sField != null) {
String[] sFields = Strings.splitStringByCommaToArray(sField); String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) { if (sFields != null) {
explainRequest.fields(sFields); explainRequest.storedFields(sFields);
} }
} }
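
Explain follows the same pattern; a minimal sketch with placeholder names, assuming the same client setup:

    ExplainResponse response = client.prepareExplain("test", "test", "1")
            .setQuery(QueryBuilders.matchAllQuery())
            .setStoredFields("obj1.field1")
            .setFetchSource(true)
            .get();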


@ -79,7 +79,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
public TopHitsAggregationBuilder(StreamInput in) throws IOException { public TopHitsAggregationBuilder(StreamInput in) throws IOException {
super(in, TYPE); super(in, TYPE);
explain = in.readBoolean(); explain = in.readBoolean();
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
if (in.readBoolean()) { if (in.readBoolean()) {
int size = in.readVInt(); int size = in.readVInt();
fieldDataFields = new ArrayList<>(size); fieldDataFields = new ArrayList<>(size);
@ -112,7 +112,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
@Override @Override
protected void doWriteTo(StreamOutput out) throws IOException { protected void doWriteTo(StreamOutput out) throws IOException {
out.writeBoolean(explain); out.writeBoolean(explain);
out.writeOptionalStreamable(fetchSourceContext); out.writeOptionalWriteable(fetchSourceContext);
boolean hasFieldDataFields = fieldDataFields != null; boolean hasFieldDataFields = fieldDataFields != null;
out.writeBoolean(hasFieldDataFields); out.writeBoolean(hasFieldDataFields);
if (hasFieldDataFields) { if (hasFieldDataFields) {
@ -596,7 +596,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.TRACK_SCORES_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.TRACK_SCORES_FIELD)) {
factory.trackScores(parser.booleanValue()); factory.trackScores(parser.booleanValue());
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(context)); factory.fetchSource(FetchSourceContext.parse(context.parser()));
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) {
factory.storedFieldsContext = factory.storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context); StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
@ -608,7 +608,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} }
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(context)); factory.fetchSource(FetchSourceContext.parse(context.parser()));
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SCRIPT_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SCRIPT_FIELDS_FIELD)) {
List<ScriptField> scriptFields = new ArrayList<>(); List<ScriptField> scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -680,7 +680,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
List<SortBuilder<?>> sorts = SortBuilder.fromXContent(context); List<SortBuilder<?>> sorts = SortBuilder.fromXContent(context);
factory.sorts(sorts); factory.sorts(sorts);
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(context)); factory.fetchSource(FetchSourceContext.parse(context.parser()));
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation()); parser.getTokenLocation());
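
The parse entry point now takes a raw XContentParser instead of a QueryParseContext, so it can be driven from any parser; a rough sketch, assuming JsonXContent is available and the parser is first advanced onto the _source value:

    try (XContentParser parser = JsonXContent.jsonXContent.createParser(
            "{\"includes\": [\"obj.*\"], \"excludes\": [\"*.secret\"]}")) {
        parser.nextToken(); // position the parser on START_OBJECT
        FetchSourceContext ctx = FetchSourceContext.parse(parser);
    }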


@ -187,7 +187,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public SearchSourceBuilder(StreamInput in) throws IOException { public SearchSourceBuilder(StreamInput in) throws IOException {
aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new); aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new);
explain = in.readOptionalBoolean(); explain = in.readOptionalBoolean();
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new); fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::new);
docValueFields = (List<String>) in.readGenericValue(); docValueFields = (List<String>) in.readGenericValue();
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new); storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
from = in.readVInt(); from = in.readVInt();
@ -234,7 +234,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalWriteable(aggregations); out.writeOptionalWriteable(aggregations);
out.writeOptionalBoolean(explain); out.writeOptionalBoolean(explain);
out.writeOptionalStreamable(fetchSourceContext); out.writeOptionalWriteable(fetchSourceContext);
out.writeGenericValue(docValueFields); out.writeGenericValue(docValueFields);
out.writeOptionalWriteable(storedFieldsContext); out.writeOptionalWriteable(storedFieldsContext);
out.writeVInt(from); out.writeVInt(from);
@ -961,7 +961,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.getParseFieldMatcher().match(currentFieldName, TRACK_SCORES_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, TRACK_SCORES_FIELD)) {
trackScores = parser.booleanValue(); trackScores = parser.booleanValue();
} else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(context); fetchSourceContext = FetchSourceContext.parse(context.parser());
} else if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) {
storedFieldsContext = storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context); StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
@ -983,7 +983,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.getParseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) {
postQueryBuilder = context.parseInnerQueryBuilder().orElse(null); postQueryBuilder = context.parseInnerQueryBuilder().orElse(null);
} else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(context); fetchSourceContext = FetchSourceContext.parse(context.parser());
} else if (context.getParseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) {
scriptFields = new ArrayList<>(); scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -1068,7 +1068,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} }
} }
} else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(context); fetchSourceContext = FetchSourceContext.parse(context.parser());
} else if (context.getParseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) { } else if (context.getParseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) {
searchAfterBuilder = SearchAfterBuilder.fromXContent(parser, context.getParseFieldMatcher()); searchAfterBuilder = SearchAfterBuilder.fromXContent(parser, context.getParseFieldMatcher());
} else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) { } else if (context.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {


@ -21,15 +21,15 @@ package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import java.io.IOException; import java.io.IOException;
@ -40,7 +40,7 @@ import java.util.List;
/** /**
* Context used to fetch the {@code _source}. * Context used to fetch the {@code _source}.
*/ */
public class FetchSourceContext implements Streamable, ToXContent { public class FetchSourceContext implements Writeable, ToXContent {
public static final ParseField INCLUDES_FIELD = new ParseField("includes", "include"); public static final ParseField INCLUDES_FIELD = new ParseField("includes", "include");
public static final ParseField EXCLUDES_FIELD = new ParseField("excludes", "exclude"); public static final ParseField EXCLUDES_FIELD = new ParseField("excludes", "exclude");
@ -51,9 +51,9 @@ public class FetchSourceContext implements Streamable, ToXContent {
private String[] includes; private String[] includes;
private String[] excludes; private String[] excludes;
public static FetchSourceContext parse(QueryParseContext context) throws IOException { public static FetchSourceContext parse(XContentParser parser) throws IOException {
FetchSourceContext fetchSourceContext = new FetchSourceContext(); FetchSourceContext fetchSourceContext = new FetchSourceContext();
fetchSourceContext.fromXContent(context); fetchSourceContext.fromXContent(parser, ParseFieldMatcher.STRICT);
return fetchSourceContext; return fetchSourceContext;
} }
@ -88,6 +88,19 @@ public class FetchSourceContext implements Streamable, ToXContent {
this.excludes = excludes == null ? Strings.EMPTY_ARRAY : excludes; this.excludes = excludes == null ? Strings.EMPTY_ARRAY : excludes;
} }
public FetchSourceContext(StreamInput in) throws IOException {
fetchSource = in.readBoolean();
includes = in.readStringArray();
excludes = in.readStringArray();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(fetchSource);
out.writeStringArray(includes);
out.writeStringArray(excludes);
}
public boolean fetchSource() { public boolean fetchSource() {
return this.fetchSource; return this.fetchSource;
} }
@ -148,8 +161,7 @@ public class FetchSourceContext implements Streamable, ToXContent {
return null; return null;
} }
public void fromXContent(QueryParseContext context) throws IOException { public void fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
boolean fetchSource = true; boolean fetchSource = true;
String[] includes = Strings.EMPTY_ARRAY; String[] includes = Strings.EMPTY_ARRAY;
@ -170,7 +182,7 @@ public class FetchSourceContext implements Streamable, ToXContent {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
if (context.getParseFieldMatcher().match(currentFieldName, INCLUDES_FIELD)) { if (parseFieldMatcher.match(currentFieldName, INCLUDES_FIELD)) {
List<String> includesList = new ArrayList<>(); List<String> includesList = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
@ -181,7 +193,7 @@ public class FetchSourceContext implements Streamable, ToXContent {
} }
} }
includes = includesList.toArray(new String[includesList.size()]); includes = includesList.toArray(new String[includesList.size()]);
} else if (context.getParseFieldMatcher().match(currentFieldName, EXCLUDES_FIELD)) { } else if (parseFieldMatcher.match(currentFieldName, EXCLUDES_FIELD)) {
List<String> excludesList = new ArrayList<>(); List<String> excludesList = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) { if (token == XContentParser.Token.VALUE_STRING) {
@ -197,10 +209,13 @@ public class FetchSourceContext implements Streamable, ToXContent {
+ " in [" + currentFieldName + "].", parser.getTokenLocation()); + " in [" + currentFieldName + "].", parser.getTokenLocation());
} }
} else if (token == XContentParser.Token.VALUE_STRING) { } else if (token == XContentParser.Token.VALUE_STRING) {
if (context.getParseFieldMatcher().match(currentFieldName, INCLUDES_FIELD)) { if (parseFieldMatcher.match(currentFieldName, INCLUDES_FIELD)) {
includes = new String[] {parser.text()}; includes = new String[] {parser.text()};
} else if (context.getParseFieldMatcher().match(currentFieldName, EXCLUDES_FIELD)) { } else if (parseFieldMatcher.match(currentFieldName, EXCLUDES_FIELD)) {
excludes = new String[] {parser.text()}; excludes = new String[] {parser.text()};
} else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token
+ " in [" + currentFieldName + "].", parser.getTokenLocation());
} }
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
@ -229,22 +244,6 @@ public class FetchSourceContext implements Streamable, ToXContent {
return builder; return builder;
} }
@Override
public void readFrom(StreamInput in) throws IOException {
fetchSource = in.readBoolean();
includes = in.readStringArray();
excludes = in.readStringArray();
in.readBoolean(); // Used to be transformSource but that was dropped in 2.1
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(fetchSource);
out.writeStringArray(includes);
out.writeStringArray(excludes);
out.writeBoolean(false); // Used to be transformSource but that was dropped in 2.1
}
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;


@ -27,6 +27,7 @@ import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Requests; import org.elasticsearch.client.Requests;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
@ -39,6 +40,7 @@ import java.util.Map;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
@ -125,49 +127,34 @@ public class BulkRequestTests extends ESTestCase {
public void testSimpleBulk6() throws Exception { public void testSimpleBulk6() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json"); String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk6.json");
BulkRequest bulkRequest = new BulkRequest(); BulkRequest bulkRequest = new BulkRequest();
try { ParsingException exc = expectThrows(ParsingException.class,
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null));
fail("should have thrown an exception about the wrong format of line 1"); assertThat(exc.getMessage(), containsString("Unknown key for a VALUE_STRING in [hello]"));
} catch (IllegalArgumentException e) {
assertThat("message contains error about the wrong format of line 1: " + e.getMessage(),
e.getMessage().contains("Malformed action/metadata line [1], expected a simple value for field [_source] but found [START_OBJECT]"), equalTo(true));
}
} }
public void testSimpleBulk7() throws Exception { public void testSimpleBulk7() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json"); String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk7.json");
BulkRequest bulkRequest = new BulkRequest(); BulkRequest bulkRequest = new BulkRequest();
try { IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null));
fail("should have thrown an exception about the wrong format of line 5"); assertThat(exc.getMessage(),
} catch (IllegalArgumentException e) { containsString("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]"));
assertThat("message contains error about the wrong format of line 5: " + e.getMessage(),
e.getMessage().contains("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]"), equalTo(true));
}
} }
public void testSimpleBulk8() throws Exception { public void testSimpleBulk8() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json"); String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk8.json");
BulkRequest bulkRequest = new BulkRequest(); BulkRequest bulkRequest = new BulkRequest();
try { IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null));
fail("should have thrown an exception about the unknown parameter _foo"); assertThat(exc.getMessage(), containsString("Action/metadata line [3] contains an unknown parameter [_foo]"));
} catch (IllegalArgumentException e) {
assertThat("message contains error about the unknown parameter _foo: " + e.getMessage(),
e.getMessage().contains("Action/metadata line [3] contains an unknown parameter [_foo]"), equalTo(true));
}
} }
public void testSimpleBulk9() throws Exception { public void testSimpleBulk9() throws Exception {
String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json"); String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk9.json");
BulkRequest bulkRequest = new BulkRequest(); BulkRequest bulkRequest = new BulkRequest();
try { IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null));
fail("should have thrown an exception about the wrong format of line 3"); assertThat(exc.getMessage(), containsString("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]"));
} catch (IllegalArgumentException e) {
assertThat("message contains error about the wrong format of line 3: " + e.getMessage(),
e.getMessage().contains("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]"), equalTo(true));
}
} }
public void testSimpleBulk10() throws Exception { public void testSimpleBulk10() throws Exception {


@ -295,7 +295,8 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
for (int i = 0; i < numDocs; i++) { for (int i = 0; i < numDocs; i++) {
builder.add( builder.add(
client().prepareUpdate() client().prepareUpdate()
.setIndex("test").setType("type1").setId(Integer.toString(i)).setFields("counter") .setIndex("test").setType("type1").setId(Integer.toString(i))
.setFields("counter")
.setScript(script) .setScript(script)
.setUpsert(jsonBuilder().startObject().field("counter", 1).endObject())); .setUpsert(jsonBuilder().startObject().field("counter", 1).endObject()));
} }
@ -405,8 +406,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
assertThat(response.getItems()[i].getType(), equalTo("type1")); assertThat(response.getItems()[i].getType(), equalTo("type1"));
assertThat(response.getItems()[i].getOpType(), equalTo("update")); assertThat(response.getItems()[i].getOpType(), equalTo("update"));
for (int j = 0; j < 5; j++) { for (int j = 0; j < 5; j++) {
GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).setFields("counter").execute() GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).get();
.actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
} }
} }


@ -52,7 +52,7 @@ public class MultiGetShardRequestTests extends ESTestCase {
for (int j = 0; j < fields.length; j++) { for (int j = 0; j < fields.length; j++) {
fields[j] = randomAsciiOfLength(randomIntBetween(1, 10)); fields[j] = randomAsciiOfLength(randomIntBetween(1, 10));
} }
item.fields(fields); item.storedFields(fields);
} }
if (randomBoolean()) { if (randomBoolean()) {
item.version(randomIntBetween(1, Integer.MAX_VALUE)); item.version(randomIntBetween(1, Integer.MAX_VALUE));
@ -84,7 +84,7 @@ public class MultiGetShardRequestTests extends ESTestCase {
assertThat(item2.index(), equalTo(item.index())); assertThat(item2.index(), equalTo(item.index()));
assertThat(item2.type(), equalTo(item.type())); assertThat(item2.type(), equalTo(item.type()));
assertThat(item2.id(), equalTo(item.id())); assertThat(item2.id(), equalTo(item.id()));
assertThat(item2.fields(), equalTo(item.fields())); assertThat(item2.storedFields(), equalTo(item.storedFields()));
assertThat(item2.version(), equalTo(item.version())); assertThat(item2.version(), equalTo(item.version()));
assertThat(item2.versionType(), equalTo(item.versionType())); assertThat(item2.versionType(), equalTo(item.versionType()));
assertThat(item2.fetchSourceContext(), equalTo(item.fetchSourceContext())); assertThat(item2.fetchSourceContext(), equalTo(item.fetchSourceContext()));


@ -48,7 +48,7 @@ public class UpdateRequestTests extends ESTestCase {
public void testUpdateRequest() throws Exception { public void testUpdateRequest() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type", "1"); UpdateRequest request = new UpdateRequest("test", "type", "1");
// simple script // simple script
request.source(XContentFactory.jsonBuilder().startObject() request.fromXContent(XContentFactory.jsonBuilder().startObject()
.field("script", "script1") .field("script", "script1")
.endObject()); .endObject());
Script script = request.script(); Script script = request.script();
@ -60,7 +60,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(params, nullValue()); assertThat(params, nullValue());
// simple verbose script // simple verbose script
request.source(XContentFactory.jsonBuilder().startObject() request.fromXContent(XContentFactory.jsonBuilder().startObject()
.startObject("script").field("inline", "script1").endObject() .startObject("script").field("inline", "script1").endObject()
.endObject()); .endObject());
script = request.script(); script = request.script();
@ -73,8 +73,13 @@ public class UpdateRequestTests extends ESTestCase {
// script with params // script with params
request = new UpdateRequest("test", "type", "1"); request = new UpdateRequest("test", "type", "1");
request.source(XContentFactory.jsonBuilder().startObject().startObject("script").field("inline", "script1").startObject("params") request.fromXContent(XContentFactory.jsonBuilder().startObject()
.field("param1", "value1").endObject().endObject().endObject()); .startObject("script")
.field("inline", "script1")
.startObject("params")
.field("param1", "value1")
.endObject()
.endObject().endObject());
script = request.script(); script = request.script();
assertThat(script, notNullValue()); assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1")); assertThat(script.getScript(), equalTo("script1"));
@ -86,8 +91,9 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(params.get("param1").toString(), equalTo("value1")); assertThat(params.get("param1").toString(), equalTo("value1"));
request = new UpdateRequest("test", "type", "1"); request = new UpdateRequest("test", "type", "1");
request.source(XContentFactory.jsonBuilder().startObject().startObject("script").startObject("params").field("param1", "value1") request.fromXContent(XContentFactory.jsonBuilder().startObject().startObject("script")
.endObject().field("inline", "script1").endObject().endObject()); .startObject("params").field("param1", "value1").endObject()
.field("inline", "script1").endObject().endObject());
script = request.script(); script = request.script();
assertThat(script, notNullValue()); assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1")); assertThat(script.getScript(), equalTo("script1"));
@ -100,9 +106,19 @@ public class UpdateRequestTests extends ESTestCase {
// script with params and upsert // script with params and upsert
request = new UpdateRequest("test", "type", "1"); request = new UpdateRequest("test", "type", "1");
request.source(XContentFactory.jsonBuilder().startObject().startObject("script").startObject("params").field("param1", "value1") request.fromXContent(XContentFactory.jsonBuilder().startObject()
.endObject().field("inline", "script1").endObject().startObject("upsert").field("field1", "value1").startObject("compound") .startObject("script")
.field("field2", "value2").endObject().endObject().endObject()); .startObject("params")
.field("param1", "value1")
.endObject()
.field("inline", "script1")
.endObject()
.startObject("upsert")
.field("field1", "value1")
.startObject("compound")
.field("field2", "value2")
.endObject()
.endObject().endObject());
script = request.script(); script = request.script();
assertThat(script, notNullValue()); assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1")); assertThat(script.getScript(), equalTo("script1"));
@ -117,9 +133,19 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));
request = new UpdateRequest("test", "type", "1"); request = new UpdateRequest("test", "type", "1");
request.source(XContentFactory.jsonBuilder().startObject().startObject("upsert").field("field1", "value1").startObject("compound") request.fromXContent(XContentFactory.jsonBuilder().startObject()
.field("field2", "value2").endObject().endObject().startObject("script").startObject("params").field("param1", "value1") .startObject("upsert")
.endObject().field("inline", "script1").endObject().endObject()); .field("field1", "value1")
.startObject("compound")
.field("field2", "value2")
.endObject()
.endObject()
.startObject("script")
.startObject("params")
.field("param1", "value1")
.endObject()
.field("inline", "script1")
.endObject().endObject());
script = request.script(); script = request.script();
assertThat(script, notNullValue()); assertThat(script, notNullValue());
assertThat(script.getScript(), equalTo("script1")); assertThat(script.getScript(), equalTo("script1"));
@ -135,8 +161,9 @@ public class UpdateRequestTests extends ESTestCase {
// script with doc // script with doc
request = new UpdateRequest("test", "type", "1"); request = new UpdateRequest("test", "type", "1");
request.source(XContentFactory.jsonBuilder().startObject().startObject("doc").field("field1", "value1").startObject("compound") request.fromXContent(XContentFactory.jsonBuilder().startObject()
.field("field2", "value2").endObject().endObject().endObject()); .startObject("doc").field("field1", "value1").startObject("compound")
.field("field2", "value2").endObject().endObject().endObject());
Map<String, Object> doc = request.doc().sourceAsMap(); Map<String, Object> doc = request.doc().sourceAsMap();
assertThat(doc.get("field1").toString(), equalTo("value1")); assertThat(doc.get("field1").toString(), equalTo("value1"));
assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
@ -187,7 +214,7 @@ public class UpdateRequestTests extends ESTestCase {
public void testInvalidBodyThrowsParseException() throws Exception { public void testInvalidBodyThrowsParseException() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type", "1"); UpdateRequest request = new UpdateRequest("test", "type", "1");
try { try {
request.source(new byte[] { (byte) '"' }); request.fromXContent(new byte[] { (byte) '"' });
fail("Should have thrown a ElasticsearchParseException"); fail("Should have thrown a ElasticsearchParseException");
} catch (ElasticsearchParseException e) { } catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("Failed to derive xcontent")); assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
@ -197,13 +224,56 @@ public class UpdateRequestTests extends ESTestCase {
// Related to issue 15338 // Related to issue 15338
public void testFieldsParsing() throws Exception { public void testFieldsParsing() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type1", "1") UpdateRequest request = new UpdateRequest("test", "type1", "1")
.source(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}")); .fromXContent(new BytesArray("{\"doc\": {\"field1\": \"value1\"}, \"fields\": \"_source\"}"));
assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1")); assertThat(request.doc().sourceAsMap().get("field1").toString(), equalTo("value1"));
assertThat(request.fields(), arrayContaining("_source")); assertThat(request.fields(), arrayContaining("_source"));
request = new UpdateRequest("test", "type2", "2") request = new UpdateRequest("test", "type2", "2")
.source(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}")); .fromXContent(new BytesArray("{\"doc\": {\"field2\": \"value2\"}, \"fields\": [\"field1\", \"field2\"]}"));
assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2")); assertThat(request.doc().sourceAsMap().get("field2").toString(), equalTo("value2"));
assertThat(request.fields(), arrayContaining("field1", "field2")); assertThat(request.fields(), arrayContaining("field1", "field2"));
} }
public void testFetchSourceParsing() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type1", "1");
request.fromXContent(
XContentFactory.jsonBuilder().startObject().field("_source", true).endObject()
);
assertThat(request.fetchSource(), notNullValue());
assertThat(request.fetchSource().includes().length, equalTo(0));
assertThat(request.fetchSource().excludes().length, equalTo(0));
assertThat(request.fetchSource().fetchSource(), equalTo(true));
request.fromXContent(
XContentFactory.jsonBuilder().startObject().field("_source", false).endObject()
);
assertThat(request.fetchSource(), notNullValue());
assertThat(request.fetchSource().includes().length, equalTo(0));
assertThat(request.fetchSource().excludes().length, equalTo(0));
assertThat(request.fetchSource().fetchSource(), equalTo(false));
request.fromXContent(
XContentFactory.jsonBuilder().startObject().field("_source", "path.inner.*").endObject()
);
assertThat(request.fetchSource(), notNullValue());
assertThat(request.fetchSource().fetchSource(), equalTo(true));
assertThat(request.fetchSource().includes().length, equalTo(1));
assertThat(request.fetchSource().excludes().length, equalTo(0));
assertThat(request.fetchSource().includes()[0], equalTo("path.inner.*"));
request.fromXContent(
XContentFactory.jsonBuilder().startObject()
.startObject("_source")
.field("includes", "path.inner.*")
.field("excludes", "another.inner.*")
.endObject()
.endObject()
);
assertThat(request.fetchSource(), notNullValue());
assertThat(request.fetchSource().fetchSource(), equalTo(true));
assertThat(request.fetchSource().includes().length, equalTo(1));
assertThat(request.fetchSource().excludes().length, equalTo(1));
assertThat(request.fetchSource().includes()[0], equalTo("path.inner.*"));
assertThat(request.fetchSource().excludes()[0], equalTo("another.inner.*"));
}
} }


@ -103,7 +103,7 @@ public class DocumentActionsIT extends ESIntegTestCase {
logger.info("Get [type1/1] with script"); logger.info("Get [type1/1] with script");
for (int i = 0; i < 5; i++) { for (int i = 0; i < 5; i++) {
getResult = client().prepareGet("test", "type1", "1").setFields("name").execute().actionGet(); getResult = client().prepareGet("test", "type1", "1").setStoredFields("name").execute().actionGet();
assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName()));
assertThat(getResult.isExists(), equalTo(true)); assertThat(getResult.isExists(), equalTo(true));
assertThat(getResult.getSourceAsBytes(), nullValue()); assertThat(getResult.getSourceAsBytes(), nullValue());


@ -131,7 +131,7 @@ public class ExplainActionIT extends ESIntegTestCase {
refresh(); refresh();
ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1") ExplainResponse response = client().prepareExplain(indexOrAlias(), "test", "1")
.setQuery(QueryBuilders.matchAllQuery()) .setQuery(QueryBuilders.matchAllQuery())
.setFields("obj1.field1").get(); .setStoredFields("obj1.field1").get();
assertNotNull(response); assertNotNull(response);
assertTrue(response.isMatch()); assertTrue(response.isMatch());
assertNotNull(response.getExplanation()); assertNotNull(response.getExplanation());
@ -148,7 +148,7 @@ public class ExplainActionIT extends ESIntegTestCase {
refresh(); refresh();
response = client().prepareExplain(indexOrAlias(), "test", "1") response = client().prepareExplain(indexOrAlias(), "test", "1")
.setQuery(QueryBuilders.matchAllQuery()) .setQuery(QueryBuilders.matchAllQuery())
.setFields("obj1.field1").setFetchSource(true).get(); .setStoredFields("obj1.field1").setFetchSource(true).get();
assertNotNull(response); assertNotNull(response);
assertTrue(response.isMatch()); assertTrue(response.isMatch());
assertNotNull(response.getExplanation()); assertNotNull(response.getExplanation());
@ -164,7 +164,7 @@ public class ExplainActionIT extends ESIntegTestCase {
response = client().prepareExplain(indexOrAlias(), "test", "1") response = client().prepareExplain(indexOrAlias(), "test", "1")
.setQuery(QueryBuilders.matchAllQuery()) .setQuery(QueryBuilders.matchAllQuery())
.setFields("obj1.field1", "obj1.field2").get(); .setStoredFields("obj1.field1", "obj1.field2").get();
assertNotNull(response); assertNotNull(response);
assertTrue(response.isMatch()); assertTrue(response.isMatch());
String v1 = (String) response.getGetResult().field("obj1.field1").getValue(); String v1 = (String) response.getGetResult().field("obj1.field1").getValue();


@ -84,7 +84,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
logger.info("--> realtime get 1 (no source, implicit)"); logger.info("--> realtime get 1 (no source, implicit)");
response = client().prepareGet(indexOrAlias(), "type1", "1").setFields(Strings.EMPTY_ARRAY).get(); response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields(Strings.EMPTY_ARRAY).get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test")); assertThat(response.getIndex(), equalTo("test"));
Set<String> fields = new HashSet<>(response.getFields().keySet()); Set<String> fields = new HashSet<>(response.getFields().keySet());
@ -109,7 +109,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
logger.info("--> realtime fetch of field"); logger.info("--> realtime fetch of field");
response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").get(); response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test")); assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsBytes(), nullValue()); assertThat(response.getSourceAsBytes(), nullValue());
@ -117,7 +117,8 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getField("field2"), nullValue()); assertThat(response.getField("field2"), nullValue());
logger.info("--> realtime fetch of field & source"); logger.info("--> realtime fetch of field & source");
response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").setFetchSource("field1", null).get(); response = client().prepareGet(indexOrAlias(), "type1", "1")
.setStoredFields("field1").setFetchSource("field1", null).get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test")); assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsMap(), hasKey("field1")); assertThat(response.getSourceAsMap(), hasKey("field1"));
@ -143,7 +144,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2")); assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
logger.info("--> realtime fetch of field (loaded from index)"); logger.info("--> realtime fetch of field (loaded from index)");
response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").get(); response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test")); assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsBytes(), nullValue()); assertThat(response.getSourceAsBytes(), nullValue());
@ -151,7 +152,8 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getField("field2"), nullValue()); assertThat(response.getField("field2"), nullValue());
logger.info("--> realtime fetch of field & source (loaded from index)"); logger.info("--> realtime fetch of field & source (loaded from index)");
response = client().prepareGet(indexOrAlias(), "type1", "1").setFields("field1").setFetchSource(true).get(); response = client().prepareGet(indexOrAlias(), "type1", "1")
.setStoredFields("field1").setFetchSource(true).get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test")); assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getSourceAsBytes(), not(nullValue())); assertThat(response.getSourceAsBytes(), not(nullValue()));
@ -232,8 +234,8 @@ public class GetActionIT extends ESIntegTestCase {
// multi get with specific field // multi get with specific field
response = client().prepareMultiGet() response = client().prepareMultiGet()
.add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").fields("field")) .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").storedFields("field"))
.add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").fields("field")) .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").storedFields("field"))
.get(); .get();
assertThat(response.getResponses().length, equalTo(2)); assertThat(response.getResponses().length, equalTo(2));
@ -269,7 +271,7 @@ public class GetActionIT extends ESIntegTestCase {
client().prepareIndex("test", "type2", "1") client().prepareIndex("test", "type2", "1")
.setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); .setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get();
response = client().prepareGet("test", "type1", "1").setFields("field").get(); response = client().prepareGet("test", "type1", "1").setStoredFields("field").get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1")); assertThat(response.getId(), equalTo("1"));
assertThat(response.getType(), equalTo("type1")); assertThat(response.getType(), equalTo("type1"));
@ -281,7 +283,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));
response = client().prepareGet("test", "type2", "1").setFields("field").get(); response = client().prepareGet("test", "type2", "1").setStoredFields("field").get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getType(), equalTo("type2")); assertThat(response.getType(), equalTo("type2"));
assertThat(response.getId(), equalTo("1")); assertThat(response.getId(), equalTo("1"));
@ -294,7 +296,7 @@ public class GetActionIT extends ESIntegTestCase {
// Now test values being fetched from stored fields. // Now test values being fetched from stored fields.
refresh(); refresh();
response = client().prepareGet("test", "type1", "1").setFields("field").get(); response = client().prepareGet("test", "type1", "1").setStoredFields("field").get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1")); assertThat(response.getId(), equalTo("1"));
fields = new HashSet<>(response.getFields().keySet()); fields = new HashSet<>(response.getFields().keySet());
@ -304,7 +306,7 @@ public class GetActionIT extends ESIntegTestCase {
assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1"));
assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));
response = client().prepareGet("test", "type2", "1").setFields("field").get(); response = client().prepareGet("test", "type2", "1").setStoredFields("field").get();
assertThat(response.isExists(), equalTo(true)); assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1")); assertThat(response.getId(), equalTo("1"));
fields = new HashSet<>(response.getFields().keySet()); fields = new HashSet<>(response.getFields().keySet());
@ -546,7 +548,7 @@ public class GetActionIT extends ESIntegTestCase {
GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1") GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1")
.setRouting("1") .setRouting("1")
.setFields("field1") .setStoredFields("field1")
.get(); .get();
assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false));
@ -559,7 +561,7 @@ public class GetActionIT extends ESIntegTestCase {
flush(); flush();
getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1") getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1")
.setFields("field1") .setStoredFields("field1")
.setRouting("1") .setRouting("1")
.get(); .get();
assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.isExists(), equalTo(true));
@ -584,21 +586,18 @@ public class GetActionIT extends ESIntegTestCase {
.setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject())
.get(); .get();
try {
client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("field1").get(); IllegalArgumentException exc =
fail(); expectThrows(IllegalArgumentException.class,
} catch (IllegalArgumentException e) { () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get());
//all well assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field"));
}
flush(); flush();
try { exc =
client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("field1").get(); expectThrows(IllegalArgumentException.class,
fail(); () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get());
} catch (IllegalArgumentException e) { assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field"));
//all well
}
} }
public void testGetFieldsComplexField() throws Exception { public void testGetFieldsComplexField() throws Exception {
@ -645,14 +644,14 @@ public class GetActionIT extends ESIntegTestCase {
logger.info("checking real time retrieval"); logger.info("checking real time retrieval");
String field = "field1.field2.field3.field4"; String field = "field1.field2.field3.field4";
GetResponse getResponse = client().prepareGet("my-index", "my-type1", "1").setFields(field).get(); GetResponse getResponse = client().prepareGet("my-index", "my-type1", "1").setStoredFields(field).get();
assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false));
assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
getResponse = client().prepareGet("my-index", "my-type2", "1").setFields(field).get(); getResponse = client().prepareGet("my-index", "my-type2", "1").setStoredFields(field).get();
assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false));
assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
@ -677,14 +676,14 @@ public class GetActionIT extends ESIntegTestCase {
logger.info("checking post-flush retrieval"); logger.info("checking post-flush retrieval");
getResponse = client().prepareGet("my-index", "my-type1", "1").setFields(field).get(); getResponse = client().prepareGet("my-index", "my-type1", "1").setStoredFields(field).get();
assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false));
assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
getResponse = client().prepareGet("my-index", "my-type2", "1").setFields(field).get(); getResponse = client().prepareGet("my-index", "my-type2", "1").setStoredFields(field).get();
assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.isExists(), equalTo(true));
assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false));
assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
@ -711,7 +710,7 @@ public class GetActionIT extends ESIntegTestCase {
index("test", "my-type1", "1", "some_field", "some text"); index("test", "my-type1", "1", "some_field", "some text");
refresh(); refresh();
GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1").setFields("_all").get(); GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("_all").get();
assertNotNull(getResponse.getField("_all").getValue()); assertNotNull(getResponse.getField("_all").getValue());
assertThat(getResponse.getField("_all").getValue().toString(), equalTo("some text")); assertThat(getResponse.getField("_all").getValue().toString(), equalTo("some text"));
} }
@ -948,12 +947,12 @@ public class GetActionIT extends ESIntegTestCase {
private void assertGetFieldException(String index, String type, String docId, String field) { private void assertGetFieldException(String index, String type, String docId, String field) {
try { try {
client().prepareGet().setIndex(index).setType(type).setId(docId).setFields(field).get(); client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field).get();
fail(); fail();
} catch (ElasticsearchException e) { } catch (ElasticsearchException e) {
assertTrue(e.getMessage().contains("You can only get this field after refresh() has been called.")); assertTrue(e.getMessage().contains("You can only get this field after refresh() has been called."));
} }
MultiGetResponse multiGetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item(index, type, docId).fields(field)).get(); MultiGetResponse multiGetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item(index, type, docId).storedFields(field)).get();
assertNull(multiGetResponse.getResponses()[0].getResponse()); assertNull(multiGetResponse.getResponses()[0].getResponse());
assertTrue(multiGetResponse.getResponses()[0].getFailure().getMessage().contains("You can only get this field after refresh() has been called.")); assertTrue(multiGetResponse.getResponses()[0].getFailure().getMessage().contains("You can only get this field after refresh() has been called."));
} }
@ -993,7 +992,7 @@ public class GetActionIT extends ESIntegTestCase {
} }
private GetResponse multiGetDocument(String index, String type, String docId, String field, @Nullable String routing) { private GetResponse multiGetDocument(String index, String type, String docId, String field, @Nullable String routing) {
MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).fields(field); MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).storedFields(field);
if (routing != null) { if (routing != null) {
getItem.routing(routing); getItem.routing(routing);
} }
@ -1004,7 +1003,7 @@ public class GetActionIT extends ESIntegTestCase {
} }
private GetResponse getDocument(String index, String type, String docId, String field, @Nullable String routing) { private GetResponse getDocument(String index, String type, String docId, String field, @Nullable String routing) {
GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setFields(field); GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field);
if (routing != null) { if (routing != null) {
getRequestBuilder.setRouting(routing); getRequestBuilder.setRouting(routing);
} }


@ -69,33 +69,33 @@ public class SimpleTimestampIT extends ESIntegTestCase {
long now2 = System.currentTimeMillis(); long now2 = System.currentTimeMillis();
// non realtime get (stored) // non realtime get (stored)
GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet();
long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
assertThat(timestamp, greaterThanOrEqualTo(now1)); assertThat(timestamp, greaterThanOrEqualTo(now1));
assertThat(timestamp, lessThanOrEqualTo(now2)); assertThat(timestamp, lessThanOrEqualTo(now2));
// verify its the same timestamp when going the replica // verify its the same timestamp when going the replica
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet();
assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
logger.info("--> check with custom timestamp (numeric)"); logger.info("--> check with custom timestamp (numeric)");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("10").setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("10").setRefreshPolicy(IMMEDIATE).get();
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
assertThat(timestamp, equalTo(10L)); assertThat(timestamp, equalTo(10L));
// verify its the same timestamp when going the replica // verify its the same timestamp when going the replica
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
logger.info("--> check with custom timestamp (string)"); logger.info("--> check with custom timestamp (string)");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("1970-01-01T00:00:00.020") client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("1970-01-01T00:00:00.020")
.setRefreshPolicy(IMMEDIATE).get(); .setRefreshPolicy(IMMEDIATE).get();
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
assertThat(timestamp, equalTo(20L)); assertThat(timestamp, equalTo(20L));
// verify its the same timestamp when going the replica // verify its the same timestamp when going the replica
getResponse = client().prepareGet("test", "type1", "1").setFields("_timestamp").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet();
assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp));
} }


@ -117,7 +117,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
// realtime get check // realtime get check
long currentTime = System.currentTimeMillis(); long currentTime = System.currentTimeMillis();
GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").get(); GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get();
long ttl0; long ttl0;
if (getResponse.isExists()) { if (getResponse.isExists()) {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
@ -127,7 +127,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
} }
// verify the ttl is still decreasing when going to the replica // verify the ttl is still decreasing when going to the replica
currentTime = System.currentTimeMillis(); currentTime = System.currentTimeMillis();
getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").get(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get();
if (getResponse.isExists()) { if (getResponse.isExists()) {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
@ -136,7 +136,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
} }
// non realtime get (stored) // non realtime get (stored)
currentTime = System.currentTimeMillis(); currentTime = System.currentTimeMillis();
getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).get(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get();
if (getResponse.isExists()) { if (getResponse.isExists()) {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
@ -145,7 +145,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
} }
// non realtime get going the replica // non realtime get going the replica
currentTime = System.currentTimeMillis(); currentTime = System.currentTimeMillis();
getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).get(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get();
if (getResponse.isExists()) { if (getResponse.isExists()) {
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now)));
@ -154,10 +154,10 @@ public class SimpleTTLIT extends ESIntegTestCase {
} }
// no TTL provided so no TTL fetched // no TTL provided so no TTL fetched
getResponse = client().prepareGet("test", "type1", "no_ttl").setFields("_ttl").setRealtime(true).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "no_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
assertThat(getResponse.getField("_ttl"), nullValue()); assertThat(getResponse.getField("_ttl"), nullValue());
// no TTL provided make sure it has default TTL // no TTL provided make sure it has default TTL
getResponse = client().prepareGet("test", "type2", "default_ttl").setFields("_ttl").setRealtime(true).execute().actionGet(); getResponse = client().prepareGet("test", "type2", "default_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl0, greaterThan(0L)); assertThat(ttl0, greaterThan(0L));
@ -190,28 +190,28 @@ public class SimpleTTLIT extends ESIntegTestCase {
)); ));
// realtime get check // realtime get check
getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(true).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(true).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
// replica realtime get check // replica realtime get check
getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(true).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(true).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
// Need to run a refresh, in order for the non realtime get to work. // Need to run a refresh, in order for the non realtime get to work.
client().admin().indices().prepareRefresh("test").execute().actionGet(); client().admin().indices().prepareRefresh("test").execute().actionGet();
// non realtime get (stored) check // non realtime get (stored) check
getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
// non realtime get going the replica check // non realtime get going the replica check
getResponse = client().prepareGet("test", "type1", "1").setFields("_ttl").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setFields("_ttl").setRealtime(false).execute().actionGet(); getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet();
assertThat(getResponse.isExists(), equalTo(false)); assertThat(getResponse.isExists(), equalTo(false));
} }
@ -287,7 +287,7 @@ public class SimpleTTLIT extends ESIntegTestCase {
} }
private long getTtl(String type, Object id) { private long getTtl(String type, Object id) {
GetResponse getResponse = client().prepareGet("test", type, id.toString()).setFields("_ttl").execute() GetResponse getResponse = client().prepareGet("test", type, id.toString()).setStoredFields("_ttl").execute()
.actionGet(); .actionGet();
return ((Number) getResponse.getField("_ttl").getValue()).longValue(); return ((Number) getResponse.getField("_ttl").getValue()).longValue();
} }


@ -123,12 +123,12 @@ public class TimestampTTLBWIT extends ESIntegTestCase {
// check TTL is kept after an update without TTL // check TTL is kept after an update without TTL
client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get();
GetResponse getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet(); GetResponse getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet();
long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L)); assertThat(ttl, greaterThan(0L));
client().prepareUpdate(indexOrAlias(), "type1", "2") client().prepareUpdate(indexOrAlias(), "type1", "2")
.setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet(); .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).execute().actionGet();
getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet(); getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet();
ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L)); assertThat(ttl, greaterThan(0L));
@ -136,7 +136,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase {
client().prepareUpdate(indexOrAlias(), "type1", "2") client().prepareUpdate(indexOrAlias(), "type1", "2")
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values",
Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet(); Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet();
getResponse = client().prepareGet("test", "type1", "2").setFields("_ttl").execute().actionGet(); getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet();
ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue();
assertThat(ttl, greaterThan(0L)); assertThat(ttl, greaterThan(0L));
assertThat(ttl, lessThanOrEqualTo(3600000L)); assertThat(ttl, lessThanOrEqualTo(3600000L));
@ -147,7 +147,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase {
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values",
Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute() Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute()
.actionGet(); .actionGet();
getResponse = client().prepareGet("test", "type1", "3").setFields("_timestamp").execute().actionGet(); getResponse = client().prepareGet("test", "type1", "3").setStoredFields("_timestamp").execute().actionGet();
long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue();
assertThat(timestamp, equalTo(1258294332000L)); assertThat(timestamp, equalTo(1258294332000L));
} }


@ -469,7 +469,7 @@ public class UpdateIT extends ESIntegTestCase {
UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
.setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo"))) .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo")))
.setFields("_source") .setFetchSource(true)
.execute().actionGet(); .execute().actionGet();
assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getIndex(), equalTo("test"));
@ -549,7 +549,7 @@ public class UpdateIT extends ESIntegTestCase {
UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1") UpdateResponse updateResponse = client().prepareUpdate("test", "type1", "1")
.setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject()) .setUpsert(XContentFactory.jsonBuilder().startObject().field("bar", "baz").endObject())
.setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo"))) .setScript(new Script("", ScriptService.ScriptType.INLINE, "put_values", Collections.singletonMap("extra", "foo")))
.setFields("_source") .setFetchSource(true)
.execute().actionGet(); .execute().actionGet();
assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getIndex(), equalTo("test"));
@ -624,14 +624,30 @@ public class UpdateIT extends ESIntegTestCase {
// check fields parameter // check fields parameter
client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1") updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
.setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null)).setFields("_source", "field") .setScript(new Script("field", ScriptService.ScriptType.INLINE, "field_inc", null))
.execute().actionGet(); .setFields("field")
.setFetchSource(true)
.execute().actionGet();
assertThat(updateResponse.getIndex(), equalTo("test")); assertThat(updateResponse.getIndex(), equalTo("test"));
assertThat(updateResponse.getGetResult(), notNullValue()); assertThat(updateResponse.getGetResult(), notNullValue());
assertThat(updateResponse.getGetResult().getIndex(), equalTo("test")); assertThat(updateResponse.getGetResult().getIndex(), equalTo("test"));
assertThat(updateResponse.getGetResult().sourceRef(), notNullValue()); assertThat(updateResponse.getGetResult().sourceRef(), notNullValue());
assertThat(updateResponse.getGetResult().field("field").getValue(), notNullValue()); assertThat(updateResponse.getGetResult().field("field").getValue(), notNullValue());
// check _source parameter
client().prepareIndex("test", "type1", "1").setSource("field1", 1, "field2", 2).execute().actionGet();
updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
.setScript(new Script("field1", ScriptService.ScriptType.INLINE, "field_inc", null))
.setFetchSource("field1", "field2")
.get();
assertThat(updateResponse.getIndex(), equalTo("test"));
assertThat(updateResponse.getGetResult(), notNullValue());
assertThat(updateResponse.getGetResult().getIndex(), equalTo("test"));
assertThat(updateResponse.getGetResult().sourceRef(), notNullValue());
assertThat(updateResponse.getGetResult().field("field1"), nullValue());
assertThat(updateResponse.getGetResult().sourceAsMap().size(), equalTo(1));
assertThat(updateResponse.getGetResult().sourceAsMap().get("field1"), equalTo(2));
// check updates without script // check updates without script
// add new field // add new field
client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet(); client().prepareIndex("test", "type1", "1").setSource("field", 1).execute().actionGet();


@ -154,7 +154,7 @@ times an update should be retried in the case of a version conflict.
The `update` action payload, supports the following options: `doc` The `update` action payload, supports the following options: `doc`
(partial document), `upsert`, `doc_as_upsert`, `script`, `params` (for (partial document), `upsert`, `doc_as_upsert`, `script`, `params` (for
script), `lang` (for script) and `fields`. See update documentation for details on script), `lang` (for script) and `_source`. See update documentation for details on
the options. Curl example with update actions: the options. Curl example with update actions:
[source,js] [source,js]
@ -165,10 +165,10 @@ the options. Curl example with update actions:
{ "script" : { "inline": "ctx._source.counter += params.param1", "lang" : "painless", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} { "script" : { "inline": "ctx._source.counter += params.param1", "lang" : "painless", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}}
{ "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
{ "doc" : {"field" : "value"}, "doc_as_upsert" : true } { "doc" : {"field" : "value"}, "doc_as_upsert" : true }
{ "update" : {"_id" : "3", "_type" : "type1", "_index" : "index1", "fields" : ["_source"]} } { "update" : {"_id" : "3", "_type" : "type1", "_index" : "index1", "_source" : true} }
{ "doc" : {"field" : "value"} } { "doc" : {"field" : "value"} }
{ "update" : {"_id" : "4", "_type" : "type1", "_index" : "index1"} } { "update" : {"_id" : "4", "_type" : "type1", "_index" : "index1"} }
{ "doc" : {"field" : "value"}, "fields": ["_source"]} { "doc" : {"field" : "value"}, "_source": true}
-------------------------------------------------- --------------------------------------------------
[float] [float]


@ -52,10 +52,6 @@ call in-place to make the document visible. This will also make other documents
changed since the last refresh visible. In order to disable realtime GET, changed since the last refresh visible. In order to disable realtime GET,
one can set the `realtime` parameter to `false`. one can set the `realtime` parameter to `false`.
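For instance, a minimal sketch (not part of the original page, reusing the `twitter` example shown further down) of disabling realtime GET:
[source,js]
--------------------------------------------------
GET twitter/tweet/1?realtime=false
--------------------------------------------------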
When getting a document, one can specify `fields` to fetch from it. They
will, when possible, be fetched as stored fields (fields mapped as
<<mapping-store,stored>> in the mapping).
[float] [float]
[[type]] [[type]]
=== Optional Type === Optional Type
@ -69,7 +65,7 @@ to fetch the first document matching the id across all types.
=== Source filtering === Source filtering
By default, the get operation returns the contents of the `_source` field unless By default, the get operation returns the contents of the `_source` field unless
you have used the `fields` parameter or if the `_source` field is disabled. you have used the `stored_fields` parameter or if the `_source` field is disabled.
You can turn off `_source` retrieval by using the `_source` parameter: You can turn off `_source` retrieval by using the `_source` parameter:
[source,js] [source,js]
@ -96,25 +92,122 @@ curl -XGET 'http://localhost:9200/twitter/tweet/1?_source=*.id,retweeted'
[float] [float]
[[get-fields]] [[get-stored-fields]]
=== Fields === Stored Fields
The get operation allows specifying a set of stored fields that will be The get operation allows specifying a set of stored fields that will be
returned by passing the `fields` parameter. For example: returned by passing the `stored_fields` parameter.
If the requested fields are not stored, they will be ignored.
Consider for instance the following mapping:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/1?fields=title,content' PUT twitter
{
"mappings": {
"tweet": {
"properties": {
"counter": {
"type": "integer",
"store": false
},
"tags": {
"type": "keyword",
"store": true
}
}
}
}
}
-------------------------------------------------- --------------------------------------------------
// CONSOLE
For backward compatibility, if the requested fields are not stored, they will be fetched Now we can add a document:
from the `_source` (parsed and extracted). This functionality has been replaced by the
<<get-source-filtering,source filtering>> parameter.
Field values fetched from the document it self are always returned as an array. Metadata fields like `_routing` and [source,js]
`_parent` fields are never returned as an array. --------------------------------------------------
PUT twitter/tweet/1
{
"counter" : 1,
"tags" : ["red"]
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
Also only leaf fields can be returned via the `field` option. So object fields can't be returned and such requests ... and try to retrieve it:
[source,js]
--------------------------------------------------
GET twitter/tweet/1?stored_fields=tags,counter
--------------------------------------------------
// CONSOLE
// TEST[continued]
The result of the above get operation is:
[source,js]
--------------------------------------------------
{
"_index": "twitter",
"_type": "tweet",
"_id": "1",
"_version": 1,
"found": true,
"fields": {
"tags": [
"red"
]
}
}
--------------------------------------------------
// TESTRESPONSE
Field values fetched from the document itself are always returned as an array.
Since the `counter` field is not stored, the get request simply ignores it when trying to get the `stored_fields`.
It is also possible to retrieve metadata fields like `_routing` and `_parent`:
[source,js]
--------------------------------------------------
PUT twitter/tweet/2?routing=user1
{
"counter" : 1,
"tags" : ["white"]
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
[source,js]
--------------------------------------------------
GET twitter/tweet/2?routing=user1&stored_fields=tags,counter
--------------------------------------------------
// CONSOLE
// TEST[continued]
The result of the above get operation is:
[source,js]
--------------------------------------------------
{
"_index": "twitter",
"_type": "tweet",
"_id": "2",
"_version": 1,
"_routing": "user1",
"found": true,
"fields": {
"tags": [
"white"
]
}
}
--------------------------------------------------
// TESTRESPONSE
Also only leaf fields can be returned via the `stored_fields` option. So object fields can't be returned and such requests
will fail. will fail.
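To illustrate (a sketch only, assuming a hypothetical object field such as `user`, since the `twitter` mapping above has only leaf fields), such a request would be rejected:
[source,js]
--------------------------------------------------
GET twitter/tweet/1?stored_fields=user
--------------------------------------------------
and fails with an error along the lines of `field [user] isn't a leaf field`, matching the check exercised in `GetActionIT` above.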
[float] [float]


@ -155,7 +155,7 @@ curl 'localhost:9200/_mget' -d '{
[[mget-fields]] [[mget-fields]]
=== Fields === Fields
Specific stored fields can be specified to be retrieved per document to get, similar to the <<get-fields,fields>> parameter of the Get API. Specific stored fields can be specified to be retrieved per document to get, similar to the <<get-stored-fields,stored_fields>> parameter of the Get API.
For example: For example:
[source,js] [source,js]
@ -166,31 +166,31 @@ curl 'localhost:9200/_mget' -d '{
"_index" : "test", "_index" : "test",
"_type" : "type", "_type" : "type",
"_id" : "1", "_id" : "1",
"fields" : ["field1", "field2"] "stored_fields" : ["field1", "field2"]
}, },
{ {
"_index" : "test", "_index" : "test",
"_type" : "type", "_type" : "type",
"_id" : "2", "_id" : "2",
"fields" : ["field3", "field4"] "stored_fields" : ["field3", "field4"]
} }
] ]
}' }'
-------------------------------------------------- --------------------------------------------------
Alternatively, you can specify the `fields` parameter in the query string Alternatively, you can specify the `stored_fields` parameter in the query string
as a default to be applied to all documents. as a default to be applied to all documents.
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl 'localhost:9200/test/type/_mget?fields=field1,field2' -d '{ curl 'localhost:9200/test/type/_mget?stored_fields=field1,field2' -d '{
"docs" : [ "docs" : [
{ {
"_id" : "1" <1> "_id" : "1" <1>
}, },
{ {
"_id" : "2", "_id" : "2",
"fields" : ["field3", "field4"] <2> "stored_fields" : ["field3", "field4"] <2>
} }
] ]
}' }'
@ -201,7 +201,7 @@ curl 'localhost:9200/test/type/_mget?fields=field1,field2' -d '{
[float] [float]
=== Generated fields === Generated fields
See <<generated-fields>> for fields are generated only when indexing. See <<generated-fields>> for fields generated only when indexing.
[float] [float]
[[mget-routing]] [[mget-routing]]


@ -17,11 +17,13 @@ For example, lets index a simple doc:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPUT localhost:9200/test/type1/1 -d '{ PUT test/type1/1
{
"counter" : 1, "counter" : 1,
"tags" : ["red"] "tags" : ["red"]
}' }
-------------------------------------------------- --------------------------------------------------
// CONSOLE
[float] [float]
=== Scripted updates === Scripted updates
@ -30,7 +32,8 @@ Now, we can execute a script that would increment the counter:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
{
"script" : { "script" : {
"inline": "ctx._source.counter += params.count", "inline": "ctx._source.counter += params.count",
"lang": "painless", "lang": "painless",
@ -38,15 +41,18 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
"count" : 4 "count" : 4
} }
} }
}' }
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
We can add a tag to the list of tags (note, if the tag exists, it We can add a tag to the list of tags (note, if the tag exists, it
will still add it, since it's a list): will still add it, since it's a list):
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
{
"script" : { "script" : {
"inline": "ctx._source.tags.add(params.tag)", "inline": "ctx._source.tags.add(params.tag)",
"lang": "painless", "lang": "painless",
@ -54,8 +60,10 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
"tag" : "blue" "tag" : "blue"
} }
} }
}' }
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
In addition to `_source`, the following variables are available through In addition to `_source`, the following variables are available through
the `ctx` map: `_index`, `_type`, `_id`, `_version`, `_routing`, the `ctx` map: `_index`, `_type`, `_id`, `_version`, `_routing`,
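As a rough sketch (not part of the original page) of reading one of these `ctx` variables, a script could copy the document id into the source:
[source,js]
--------------------------------------------------
POST test/type1/1/_update
{
    "script" : {
        "inline": "ctx._source.doc_id = ctx._id",
        "lang": "painless"
    }
}
--------------------------------------------------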
@ -65,36 +73,45 @@ We can also add a new field to the document:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
"script" : "ctx._source.name_of_new_field = \"value_of_new_field\"" {
}' "script" : "ctx._source.new_field = \"value_of_new_field\""
}
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
Or remove a field from the document: Or remove a field from the document:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
"script" : "ctx._source.remove(\"name_of_field\")" {
}' "script" : "ctx._source.remove(\"new_field\")"
}
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
And, we can even change the operation that is executed. This example deletes And, we can even change the operation that is executed. This example deletes
the doc if the `tags` field contains `blue`, otherwise it does nothing the doc if the `tags` field contains `green`, otherwise it does nothing
(`noop`): (`noop`):
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
{
"script" : { "script" : {
"inline": "ctx._source.tags.contains(params.tag) ? ctx.op = \"delete\" : ctx.op = \"none\"", "inline": "if (ctx._source.tags.contains(params.tag)) { ctx.op = \"delete\" } else { ctx.op = \"none\" }",
"lang": "painless", "lang": "painless",
"params" : { "params" : {
"tag" : "blue" "tag" : "green"
} }
} }
}' }
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
[float] [float]
=== Updates with a partial document === Updates with a partial document
@ -106,31 +123,36 @@ example:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
{
"doc" : { "doc" : {
"name" : "new_name" "name" : "new_name"
} }
}' }
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
If both `doc` and `script` are specified, then `doc` is ignored. Best is If both `doc` and `script` are specified, then `doc` is ignored. Best is
to put your field pairs of the partial document in the script itself. to put your field pairs of the partial document in the script itself.
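For example, a sketch (field name is illustrative) that sets the same `name` field from a script instead of a partial document:
[source,js]
--------------------------------------------------
POST test/type1/1/_update
{
    "script" : {
        "inline": "ctx._source.name = params.name",
        "lang": "painless",
        "params" : {
            "name" : "new_name"
        }
    }
}
--------------------------------------------------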
[float] [float]
=== Detecting noop updates === Detecting noop updates
If `doc` is specified its value is merged with the existing `_source`. By
default the document is only reindexed if the new `_source` field differs from If `doc` is specified, its value is merged with the existing `_source`.
the old. Setting `detect_noop` to `false` will cause Elasticsearch to always By default, updates that don't change anything are detected and return "result": "noop", like this:
update the document even if it hasn't changed. For example:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
{
"doc" : { "doc" : {
"name" : "new_name" "name" : "new_name"
}, }
"detect_noop": false }
}'
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
If `name` was `new_name` before the request was sent then the entire update If `name` was `new_name` before the request was sent then the entire update
request is ignored. The `result` element in the response returns `noop` if request is ignored. The `result` element in the response returns `noop` if
@ -139,13 +161,34 @@ the request was ignored.
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
{ {
"_shards": {
"total": 0,
"successful": 0,
"failed": 0
},
"_index": "test", "_index": "test",
"_type": "type1", "_type": "type1",
"_id": "1", "_id": "1",
"_version": 1, "_version": 6,
"result": noop "result": noop
} }
-------------------------------------------------- --------------------------------------------------
// TESTRESPONSE
You can disable this behavior by setting "detect_noop": false like this:
[source,js]
--------------------------------------------------
POST test/type1/1/_update
{
"doc" : {
"name" : "new_name"
},
"detect_noop": true
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
[[upserts]] [[upserts]]
[float] [float]
@ -157,7 +200,8 @@ will be inserted as a new document. If the document does exist, then the
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
{
"script" : { "script" : {
"inline": "ctx._source.counter += params.count", "inline": "ctx._source.counter += params.count",
"lang": "painless", "lang": "painless",
@ -168,8 +212,10 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
"upsert" : { "upsert" : {
"counter" : 1 "counter" : 1
} }
}' }
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
[float] [float]
==== `scripted_upsert` ==== `scripted_upsert`
@ -180,7 +226,8 @@ or not -- i.e. the script handles initializing the document instead of the
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/sessions/session/dh3sgudg8gsrgl/_update' -d '{ POST sessions/session/dh3sgudg8gsrgl/_update
{
"scripted_upsert":true, "scripted_upsert":true,
"script" : { "script" : {
"id": "my_web_session_summariser", "id": "my_web_session_summariser",
@ -193,7 +240,7 @@ curl -XPOST 'localhost:9200/sessions/session/dh3sgudg8gsrgl/_update' -d '{
} }
}, },
"upsert" : {} "upsert" : {}
}' }
-------------------------------------------------- --------------------------------------------------
[float] [float]
@ -205,13 +252,16 @@ value:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ POST test/type1/1/_update
{
"doc" : { "doc" : {
"name" : "new_name" "name" : "new_name"
}, },
"doc_as_upsert" : true "doc_as_upsert" : true
}' }
-------------------------------------------------- --------------------------------------------------
// CONSOLE
// TEST[continued]
[float] [float]
@ -255,10 +305,12 @@ See <<index-wait-for-active-shards,here>> for details.
Control when the changes made by this request are visible to search. See Control when the changes made by this request are visible to search. See
<<docs-refresh>>. <<docs-refresh>>.
`fields`:: `_source`::
Allows to control if and how the updated source should be returned in the response.
By default the updated source is not returned.
See <<search-request-source-filtering, `source filtering`>> for details.
Return the relevant fields from the updated document. Specify `_source` to
return the full updated source.
`version` & `version_type`:: `version` & `version_type`::


@ -66,7 +66,7 @@ This will yield the same result as the previous request.
Set to `true` to retrieve the `_source` of the document explained. You can also Set to `true` to retrieve the `_source` of the document explained. You can also
retrieve part of the document by using `_source_include` & `_source_exclude` (see <<get-source-filtering,Get API>> for more details) retrieve part of the document by using `_source_include` & `_source_exclude` (see <<get-source-filtering,Get API>> for more details)
`fields`:: `stored_fields`::
Allows to control which stored fields to return as part of the Allows to control which stored fields to return as part of the
document explained. document explained.
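A hedged sketch (index, id, query and field name are illustrative only) of passing it as a query-string parameter:
[source,js]
--------------------------------------------------
GET twitter/tweet/0/_explain?stored_fields=tags&q=message:elasticsearch
--------------------------------------------------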


@ -5,7 +5,7 @@
Allows to control how the `_source` field is returned with every hit. Allows to control how the `_source` field is returned with every hit.
By default operations return the contents of the `_source` field unless By default operations return the contents of the `_source` field unless
you have used the `fields` parameter or if the `_source` field is disabled. you have used the `stored_fields` parameter or if the `_source` field is disabled.
You can turn off `_source` retrieval by using the `_source` parameter: You can turn off `_source` retrieval by using the `_source` parameter:
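A minimal sketch of such a request (query and field names are illustrative):
[source,js]
--------------------------------------------------
GET /_search
{
    "_source": false,
    "query" : {
        "term" : { "user" : "kimchy" }
    }
}
--------------------------------------------------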


@ -38,10 +38,7 @@ GET /_search
-------------------------------------------------- --------------------------------------------------
// CONSOLE // CONSOLE
If the requested fields are not stored (`store` mapping set to `false`), they will be ignored.
For backwards compatibility, if the fields parameter specifies fields which are not stored (`store` mapping set to
`false`), it will load the `_source` and extract it from it. This functionality has been replaced by the
<<search-request-source-filtering,source filtering>> parameter.
Field values fetched from the document itself are always returned as an array. Metadata fields like `_routing` and Field values fetched from the document itself are always returned as an array. Metadata fields like `_routing` and
`_parent` fields are never returned as an array. `_parent` fields are never returned as an array.
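As a hedged sketch (abridged, names illustrative), a hit would then carry the stored values under `fields`, each one as an array:
[source,js]
--------------------------------------------------
{
    "_index": "twitter",
    "_type": "tweet",
    "_id": "1",
    "_score": 1.0,
    "fields": {
        "tags": [ "red" ]
    }
}
--------------------------------------------------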


@ -242,8 +242,8 @@ public class PercolateRequest extends ActionRequest<PercolateRequest> implements
if (source == null && getRequest == null) { if (source == null && getRequest == null) {
validationException = addValidationError("source or get is missing", validationException); validationException = addValidationError("source or get is missing", validationException);
} }
if (getRequest != null && getRequest.fields() != null) { if (getRequest != null && getRequest.storedFields() != null) {
validationException = addValidationError("get fields option isn't supported via percolate request", validationException); validationException = addValidationError("get stored fields option isn't supported via percolate request", validationException);
} }
return validationException; return validationException;
} }


@ -108,7 +108,7 @@ public class SizeMappingIT extends ESIntegTestCase {
final String source = "{\"f\":10}"; final String source = "{\"f\":10}";
indexRandom(true, indexRandom(true,
client().prepareIndex("test", "type", "1").setSource(source)); client().prepareIndex("test", "type", "1").setSource(source));
GetResponse getResponse = client().prepareGet("test", "type", "1").setFields("_size").get(); GetResponse getResponse = client().prepareGet("test", "type", "1").setStoredFields("_size").get();
assertNotNull(getResponse.getField("_size")); assertNotNull(getResponse.getField("_size"));
assertEquals(source.length(), getResponse.getField("_size").getValue()); assertEquals(source.length(), getResponse.getField("_size").getValue());
} }


@ -26,7 +26,7 @@
index: test index: test
type: type1 type: type1
id: 1 id: 1
fields: "_size" stored_fields: "_size"
- match: { _size: 13 } - match: { _size: 13 }


@ -39,7 +39,19 @@
}, },
"fields": { "fields": {
"type": "list", "type": "list",
"description" : "Default comma-separated list of fields to return in the response for updates" "description" : "Default comma-separated list of fields to return in the response for updates, can be overridden on each sub-request"
},
"_source": {
"type" : "list",
"description" : "True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub-request"
},
"_source_exclude": {
"type" : "list",
"description" : "Default list of fields to exclude from the returned _source field, can be overridden on each sub-request"
},
"_source_include": {
"type" : "list",
"description" : "Default list of fields to extract and return from the _source field, can be overridden on each sub-request"
}, },
"pipeline" : { "pipeline" : {
"type" : "string", "type" : "string",


@ -40,13 +40,13 @@
"type" : "boolean", "type" : "boolean",
"description" : "Specify whether to return detailed information about score computation as part of a hit" "description" : "Specify whether to return detailed information about score computation as part of a hit"
}, },
"fields": { "stored_fields": {
"type" : "list", "type" : "list",
"description" : "A comma-separated list of fields to return as part of a hit" "description" : "A comma-separated list of stored fields to return as part of a hit"
}, },
"fielddata_fields": { "docvalue_fields": {
"type" : "list", "type" : "list",
"description" : "A comma-separated list of fields to return as the field data representation of a field for each hit" "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit"
}, },
"from": { "from": {
"type" : "number", "type" : "number",


@ -41,9 +41,9 @@
"type" : "string", "type" : "string",
"description" : "The default field for query string query (default: _all)" "description" : "The default field for query string query (default: _all)"
}, },
"fields": { "stored_fields": {
"type": "list", "type": "list",
"description" : "A comma-separated list of fields to return in the response" "description" : "A comma-separated list of stored fields to return in the response"
}, },
"lenient": { "lenient": {
"type" : "boolean", "type" : "boolean",


@ -23,9 +23,9 @@
} }
}, },
"params": { "params": {
"fields": { "stored_fields": {
"type": "list", "type": "list",
"description" : "A comma-separated list of fields to return in the response" "description" : "A comma-separated list of stored fields to return in the response"
}, },
"parent": { "parent": {
"type" : "string", "type" : "string",


@ -16,9 +16,9 @@
} }
}, },
"params": { "params": {
"fields": { "stored_fields": {
"type": "list", "type": "list",
"description" : "A comma-separated list of fields to return in the response" "description" : "A comma-separated list of stored fields to return in the response"
}, },
"preference": { "preference": {
"type" : "string", "type" : "string",


@ -31,6 +31,18 @@
"type": "list", "type": "list",
"description": "A comma-separated list of fields to return in the response" "description": "A comma-separated list of fields to return in the response"
}, },
"_source": {
"type" : "list",
"description" : "True or false to return the _source field or not, or a list of fields to return"
},
"_source_exclude": {
"type" : "list",
"description" : "A list of fields to exclude from the returned _source field"
},
"_source_include": {
"type" : "list",
"description" : "A list of fields to extract and return from the _source field"
},
"lang": { "lang": {
"type": "string", "type": "string",
"description": "The script language (default: groovy)" "description": "The script language (default: groovy)"


@ -40,13 +40,17 @@
"type" : "boolean", "type" : "boolean",
"description" : "Specify whether to return detailed information about score computation as part of a hit" "description" : "Specify whether to return detailed information about score computation as part of a hit"
}, },
"fields": { "stored_fields": {
"type" : "list", "type" : "list",
"description" : "A comma-separated list of fields to return as part of a hit" "description" : "A comma-separated list of stored fields to return as part of a hit"
},
"docvalue_fields": {
"type" : "list",
"description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit"
}, },
"fielddata_fields": { "fielddata_fields": {
"type" : "list", "type" : "list",
"description" : "A comma-separated list of fields to return as the field data representation of a field for each hit" "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit"
}, },
"from": { "from": {
"type" : "number", "type" : "number",


@ -1,49 +0,0 @@
---
"Fields":
- do:
index:
refresh: true
index: test_index
type: test_type
id: test_id_1
body: { "foo": "bar" }
- do:
index:
refresh: true
index: test_index
type: test_type
id: test_id_2
body: { "foo": "qux" }
- do:
index:
refresh: true
index: test_index
type: test_type
id: test_id_3
body: { "foo": "corge" }
- do:
bulk:
refresh: true
body: |
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "fields": ["_source"] } }
{ "doc": { "foo": "baz" } }
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } }
{ "fields": ["_source"], "doc": { "foo": "quux" } }
- match: { items.0.update.get._source.foo: baz }
- match: { items.1.update.get._source.foo: quux }
- do:
bulk:
index: test_index
type: test_type
fields: _source
body: |
{ "update": { "_id": "test_id_3" } }
{ "doc": { "foo": "garply" } }
- match: { items.0.update.get._source.foo: garply }


@ -0,0 +1,76 @@
---
"Source filtering":
- do:
index:
refresh: true
index: test_index
type: test_type
id: test_id_1
body: { "foo": "bar", "bar": "foo" }
- do:
index:
refresh: true
index: test_index
type: test_type
id: test_id_2
body: { "foo": "qux", "bar": "pux" }
- do:
index:
refresh: true
index: test_index
type: test_type
id: test_id_3
body: { "foo": "corge", "bar": "forge" }
- do:
bulk:
refresh: true
body: |
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": true } }
{ "doc": { "foo": "baz" } }
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } }
{ "_source": true, "doc": { "foo": "quux" } }
- match: { items.0.update.get._source.foo: baz }
- match: { items.1.update.get._source.foo: quux }
- do:
bulk:
index: test_index
type: test_type
_source: true
body: |
{ "update": { "_id": "test_id_3" } }
{ "doc": { "foo": "garply" } }
- match: { items.0.update.get._source.foo: garply }
- do:
bulk:
refresh: true
body: |
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": {"includes": "bar"} } }
{ "doc": { "foo": "baz" } }
{ "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } }
{ "_source": {"includes": "foo"}, "doc": { "foo": "quux" } }
- match: { items.0.update.get._source.bar: foo }
- is_false: items.0.update.get._source.foo
- match: { items.1.update.get._source.foo: quux }
- is_false: items.1.update.get._source.bar
- do:
bulk:
index: test_index
type: test_type
_source_include: foo
body: |
{ "update": { "_id": "test_id_3" } }
{ "doc": { "foo": "garply" } }
- match: { items.0.update.get._source.foo: garply }
- is_false: items.0.update.get._source.bar


@ -28,7 +28,7 @@
type: test type: test
id: 1 id: 1
routing: 5 routing: 5
fields: [_routing] stored_fields: [_routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _routing: "5"} - match: { _routing: "5"}


@ -31,7 +31,7 @@
type: test type: test
id: 1 id: 1
parent: 5 parent: 5
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _parent: "5"} - match: { _parent: "5"}


@ -32,7 +32,7 @@
id: 1 id: 1
parent: 5 parent: 5
routing: 4 routing: 4
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _parent: "5"} - match: { _parent: "5"}


@ -1,5 +1,5 @@
--- ---
"Fields": "Stored fields":
- do: - do:
indices.create: indices.create:
@ -26,7 +26,7 @@
index: test_1 index: test_1
type: test type: test
id: 1 id: 1
fields: foo stored_fields: foo
- match: { _index: test_1 } - match: { _index: test_1 }
- match: { _type: test } - match: { _type: test }
@ -39,7 +39,7 @@
index: test_1 index: test_1
type: test type: test
id: 1 id: 1
fields: [foo, count] stored_fields: [foo, count]
- match: { fields.foo: [bar] } - match: { fields.foo: [bar] }
- match: { fields.count: [1] } - match: { fields.count: [1] }
@ -50,7 +50,7 @@
index: test_1 index: test_1
type: test type: test
id: 1 id: 1
fields: [foo, count, _source] stored_fields: [foo, count, _source]
- match: { fields.foo: [bar] } - match: { fields.foo: [bar] }
- match: { fields.count: [1] } - match: { fields.count: [1] }


@ -25,7 +25,7 @@ setup:
type: test type: test
id: 1 id: 1
parent: 中文 parent: 中文
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _parent: 中文 } - match: { _parent: 中文 }


@ -28,7 +28,7 @@
type: test type: test
id: 1 id: 1
routing: 5 routing: 5
fields: [_routing] stored_fields: [_routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _routing: "5"} - match: { _routing: "5"}


@ -32,7 +32,7 @@
id: 1 id: 1
parent: 5 parent: 5
routing: 4 routing: 4
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _parent: "5"} - match: { _parent: "5"}


@ -58,7 +58,7 @@
index: test_1 index: test_1
type: test type: test
id: 1 id: 1
fields: count stored_fields: count
_source: true _source: true
- match: { _index: test_1 } - match: { _index: test_1 }


@ -28,7 +28,7 @@
type: test type: test
id: 1 id: 1
routing: 5 routing: 5
fields: [_routing] stored_fields: [_routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _routing: "5"} - match: { _routing: "5"}


@ -31,7 +31,7 @@
type: test type: test
id: 1 id: 1
parent: 5 parent: 5
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _parent: "5"} - match: { _parent: "5"}


@ -32,7 +32,7 @@
id: 1 id: 1
parent: 5 parent: 5
routing: 4 routing: 4
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _id: "1"} - match: { _id: "1"}
- match: { _parent: "5"} - match: { _parent: "5"}


@ -1,5 +1,5 @@
--- ---
"Fields": "Stored fields":
- do: - do:
indices.create: indices.create:
@ -29,9 +29,9 @@
body: body:
docs: docs:
- { _id: 1 } - { _id: 1 }
- { _id: 1, fields: foo } - { _id: 1, stored_fields: foo }
- { _id: 1, fields: [foo] } - { _id: 1, stored_fields: [foo] }
- { _id: 1, fields: [foo, _source] } - { _id: 1, stored_fields: [foo, _source] }
- is_false: docs.0.fields - is_false: docs.0.fields
- match: { docs.0._source: { foo: bar }} - match: { docs.0._source: { foo: bar }}
@ -49,13 +49,13 @@
mget: mget:
index: test_1 index: test_1
type: test type: test
fields: foo stored_fields: foo
body: body:
docs: docs:
- { _id: 1 } - { _id: 1 }
- { _id: 1, fields: foo } - { _id: 1, stored_fields: foo }
- { _id: 1, fields: [foo] } - { _id: 1, stored_fields: [foo] }
- { _id: 1, fields: [foo, _source] } - { _id: 1, stored_fields: [foo, _source] }
- match: { docs.0.fields.foo: [bar] } - match: { docs.0.fields.foo: [bar] }
- is_false: docs.0._source - is_false: docs.0._source
@ -73,13 +73,13 @@
mget: mget:
index: test_1 index: test_1
type: test type: test
fields: [foo] stored_fields: [foo]
body: body:
docs: docs:
- { _id: 1 } - { _id: 1 }
- { _id: 1, fields: foo } - { _id: 1, stored_fields: foo }
- { _id: 1, fields: [foo] } - { _id: 1, stored_fields: [foo] }
- { _id: 1, fields: [foo, _source] } - { _id: 1, stored_fields: [foo, _source] }
- match: { docs.0.fields.foo: [bar] } - match: { docs.0.fields.foo: [bar] }
- is_false: docs.0._source - is_false: docs.0._source
@ -97,13 +97,13 @@
mget: mget:
index: test_1 index: test_1
type: test type: test
fields: [foo, _source] stored_fields: [foo, _source]
body: body:
docs: docs:
- { _id: 1 } - { _id: 1 }
- { _id: 1, fields: foo } - { _id: 1, stored_fields: foo }
- { _id: 1, fields: [foo] } - { _id: 1, stored_fields: [foo] }
- { _id: 1, fields: [foo, _source] } - { _id: 1, stored_fields: [foo, _source] }
- match: { docs.0.fields.foo: [bar] } - match: { docs.0.fields.foo: [bar] }
- match: { docs.0._source: { foo: bar }} - match: { docs.0._source: { foo: bar }}


@ -34,9 +34,9 @@
body: body:
docs: docs:
- { _id: 1 } - { _id: 1 }
- { _id: 1, parent: 5, fields: [ _parent, _routing ] } - { _id: 1, parent: 5, stored_fields: [ _parent, _routing ] }
- { _id: 1, parent: 4, fields: [ _parent, _routing ] } - { _id: 1, parent: 4, stored_fields: [ _parent, _routing ] }
- { _id: 2, parent: 5, fields: [ _parent, _routing ] } - { _id: 2, parent: 5, stored_fields: [ _parent, _routing ] }
- is_false: docs.0.found - is_false: docs.0.found
- is_false: docs.1.found - is_false: docs.1.found


@ -26,7 +26,7 @@
mget: mget:
index: test_1 index: test_1
type: test type: test
fields: [_routing] stored_fields: [_routing]
body: body:
docs: docs:
- { _id: 1 } - { _id: 1 }


@ -29,7 +29,7 @@
mget: mget:
index: test_1 index: test_1
type: test type: test
fields: [ _routing , _parent] stored_fields: [ _routing , _parent]
body: body:
docs: docs:
- { _id: 1 } - { _id: 1 }


@ -30,7 +30,7 @@
type: test type: test
id: 1 id: 1
routing: 5 routing: 5
fields: _routing stored_fields: _routing
- match: { _routing: "5"} - match: { _routing: "5"}
@ -49,9 +49,9 @@
type: test type: test
id: 1 id: 1
routing: 5 routing: 5
fields: foo _source: foo
body: body:
doc: { foo: baz } doc: { foo: baz }
- match: { get.fields.foo: [baz] } - match: { get._source.foo: baz }


@ -36,7 +36,7 @@ setup:
type: test type: test
id: 1 id: 1
parent: 5 parent: 5
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _parent: "5"} - match: { _parent: "5"}
- match: { _routing: "5"} - match: { _routing: "5"}
@ -47,11 +47,11 @@ setup:
type: test type: test
id: 1 id: 1
parent: 5 parent: 5
fields: foo _source: foo
body: body:
doc: { foo: baz } doc: { foo: baz }
- match: { get.fields.foo: [baz] } - match: { get._source.foo: baz }
--- ---
"Parent omitted": "Parent omitted":


@ -34,7 +34,7 @@
id: 1 id: 1
routing: 4 routing: 4
parent: 5 parent: 5
fields: [_parent, _routing] stored_fields: [_parent, _routing]
- match: { _parent: "5"} - match: { _parent: "5"}
- match: { _routing: "4"} - match: { _routing: "4"}
@ -56,9 +56,9 @@
id: 1 id: 1
parent: 5 parent: 5
routing: 4 routing: 4
fields: foo _source: foo
body: body:
doc: { foo: baz } doc: { foo: baz }
- match: { get.fields.foo: [baz] } - match: { get._source.foo: baz }


@ -1,19 +1,18 @@
--- ---
"Fields": "Source filtering":
- do: - do:
update: update:
index: test_1 index: test_1
type: test type: test
id: 1 id: 1
fields: foo,bar,_source _source: [foo, bar]
body: body:
doc: { foo: baz } doc: { foo: baz }
upsert: { foo: bar } upsert: { foo: bar }
- match: { get._source.foo: bar } - match: { get._source.foo: bar }
- match: { get.fields.foo: [bar] } - is_false: get._source.bar
- is_false: get.fields.bar
# TODO: # TODO:
# #


@ -33,6 +33,6 @@
type: test type: test
id: 1 id: 1
parent: 5 parent: 5
fields: [ _parent, _routing ] stored_fields: [ _parent, _routing ]