Add the ability to disable the retrieval of the stored fields entirely
This change adds a special field named `_none_` that disables retrieval of the stored fields in a search request or in a `TopHitsAggregation`. To disable stored fields retrieval entirely (including metadata fields such as `_id` or `_type`), use `_none_` like this:

````
POST _search
{
  "stored_fields": "_none_"
}
````
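The same `_none_` value also works inside a `top_hits` aggregation; a minimal sketch of such a request (the index and aggregation names are illustrative):

````
POST my-index/_search
{
  "size": 0,
  "aggs": {
    "top_docs": {
      "top_hits": {
        "size": 1,
        "stored_fields": "_none_"
      }
    }
  }
}
````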
parent 9cce45d4af
commit 4682fc34ae
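Note that, as enforced by the `SearchService` change in the diff below, `_none_` cannot be combined with a request for `_source` or `version`; a request like the following sketch is rejected with a `SearchContextException`:

````
POST _search
{
  "stored_fields": "_none_",
  "version": true
}
````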
@@ -249,14 +249,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
        return this;
    }

    /**
     * Sets no fields to be loaded, resulting in only id and type to be returned per field.
     */
    public SearchRequestBuilder setNoStoredFields() {
        sourceBuilder().noStoredFields();
        return this;
    }

    /**
     * Indicates whether the response should contain the stored _source for every hit
     */
@@ -302,7 +294,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se

    /**
     * Adds a stored field to load and return (note, it must be stored) as part of the search request.
     * If none are specified, the source of the document will be return.
     */
    public SearchRequestBuilder addStoredField(String field) {
        sourceBuilder().storedField(field);
@@ -380,9 +371,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
    }

    /**
     * Sets the stored fields to load and return as part of the search request. If none
     * are specified, the source of the document will be returned.
     *
     * Adds stored fields to load and return (note, it must be stored) as part of the search request.
     * To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
     * @deprecated Use {@link SearchRequestBuilder#storedFields(String...)} instead.
     */
    @Deprecated
@@ -392,8 +382,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
    }

    /**
     * Sets the fields to load and return as part of the search request. If none
     * are specified, the source of the document will be returned.
     * Adds stored fields to load and return (note, it must be stored) as part of the search request.
     * To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
     */
    public SearchRequestBuilder storedFields(String... fields) {
        sourceBuilder().storedFields(Arrays.asList(fields));

@ -34,6 +34,7 @@ import org.elasticsearch.script.ScriptContext;
|
|||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
|
@ -137,7 +138,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
private boolean version;
|
||||
private boolean trackScores;
|
||||
|
||||
private List<String> storedFieldNames;
|
||||
private StoredFieldsContext storedFieldsContext;
|
||||
private QueryBuilder query = DEFAULT_INNER_HIT_QUERY;
|
||||
private List<SortBuilder<?>> sorts;
|
||||
private List<String> docValueFields;
|
||||
|
@ -156,14 +157,14 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
explain = other.explain;
|
||||
version = other.version;
|
||||
trackScores = other.trackScores;
|
||||
if (other.storedFieldNames != null) {
|
||||
storedFieldNames = new ArrayList<>(other.storedFieldNames);
|
||||
if (other.storedFieldsContext != null) {
|
||||
storedFieldsContext = new StoredFieldsContext(other.storedFieldsContext);
|
||||
}
|
||||
if (other.docValueFields != null) {
|
||||
docValueFields = new ArrayList<>(other.docValueFields);
|
||||
docValueFields = new ArrayList<> (other.docValueFields);
|
||||
}
|
||||
if (other.scriptFields != null) {
|
||||
scriptFields = new HashSet<>(other.scriptFields);
|
||||
scriptFields = new HashSet<> (other.scriptFields);
|
||||
}
|
||||
if (other.fetchSourceContext != null) {
|
||||
fetchSourceContext = new FetchSourceContext(
|
||||
|
@ -210,7 +211,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
explain = in.readBoolean();
|
||||
version = in.readBoolean();
|
||||
trackScores = in.readBoolean();
|
||||
storedFieldNames = (List<String>) in.readGenericValue();
|
||||
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
|
||||
docValueFields = (List<String>) in.readGenericValue();
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
|
@ -248,14 +249,14 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
out.writeBoolean(explain);
|
||||
out.writeBoolean(version);
|
||||
out.writeBoolean(trackScores);
|
||||
out.writeGenericValue(storedFieldNames);
|
||||
out.writeOptionalWriteable(storedFieldsContext);
|
||||
out.writeGenericValue(docValueFields);
|
||||
boolean hasScriptFields = scriptFields != null;
|
||||
out.writeBoolean(hasScriptFields);
|
||||
if (hasScriptFields) {
|
||||
out.writeVInt(scriptFields.size());
|
||||
for (ScriptField scriptField : scriptFields) {
|
||||
scriptField.writeTo(out);;
|
||||
scriptField.writeTo(out);
|
||||
}
|
||||
}
|
||||
out.writeOptionalStreamable(fetchSourceContext);
|
||||
|
@ -343,39 +344,42 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
/**
|
||||
* Gets the stored fields to load and return.
|
||||
*
|
||||
* @deprecated Use {@link InnerHitBuilder#getStoredFieldNames()} instead.
|
||||
* @deprecated Use {@link InnerHitBuilder#getStoredFieldsContext()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public List<String> getFieldNames() {
|
||||
return storedFieldNames;
|
||||
return storedFieldsContext == null ? null : storedFieldsContext.fieldNames();
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the stored fields to load and return. If none
|
||||
* are specified, the source of the document will be returned.
|
||||
* Sets the stored fields to load and return.
|
||||
* If none are specified, the source of the document will be returned.
|
||||
*
|
||||
* @deprecated Use {@link InnerHitBuilder#setStoredFieldNames(List)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public InnerHitBuilder setFieldNames(List<String> fieldNames) {
|
||||
this.storedFieldNames = fieldNames;
|
||||
return this;
|
||||
return setStoredFieldNames(fieldNames);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Gets the stored fields to load and return.
|
||||
* Gets the stored fields context.
|
||||
*/
|
||||
public List<String> getStoredFieldNames() {
|
||||
return storedFieldNames;
|
||||
public StoredFieldsContext getStoredFieldsContext() {
|
||||
return storedFieldsContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the stored fields to load and return. If none
|
||||
* are specified, the source of the document will be returned.
|
||||
* Sets the stored fields to load and return.
|
||||
* If none are specified, the source of the document will be returned.
|
||||
*/
|
||||
public InnerHitBuilder setStoredFieldNames(List<String> fieldNames) {
|
||||
this.storedFieldNames = fieldNames;
|
||||
if (storedFieldsContext == null) {
|
||||
storedFieldsContext = StoredFieldsContext.fromList(fieldNames);
|
||||
} else {
|
||||
storedFieldsContext.addFieldNames(fieldNames);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -564,14 +568,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
innerHitsContext.explain(explain);
|
||||
innerHitsContext.version(version);
|
||||
innerHitsContext.trackScores(trackScores);
|
||||
if (storedFieldNames != null) {
|
||||
if (storedFieldNames.isEmpty()) {
|
||||
innerHitsContext.emptyFieldNames();
|
||||
} else {
|
||||
for (String fieldName : storedFieldNames) {
|
||||
innerHitsContext.fieldNames().add(fieldName);
|
||||
}
|
||||
}
|
||||
if (storedFieldsContext != null) {
|
||||
innerHitsContext.storedFieldsContext(storedFieldsContext);
|
||||
}
|
||||
if (docValueFields != null) {
|
||||
DocValueFieldsContext docValueFieldsContext = innerHitsContext
|
||||
|
@ -633,16 +631,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
if (fetchSourceContext != null) {
|
||||
builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext, params);
|
||||
}
|
||||
if (storedFieldNames != null) {
|
||||
if (storedFieldNames.size() == 1) {
|
||||
builder.field(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0));
|
||||
} else {
|
||||
builder.startArray(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName());
|
||||
for (String fieldName : storedFieldNames) {
|
||||
builder.value(fieldName);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if (storedFieldsContext != null) {
|
||||
storedFieldsContext.toXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), builder);
|
||||
}
|
||||
if (docValueFields != null) {
|
||||
builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
|
||||
|
@ -693,7 +683,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
Objects.equals(explain, that.explain) &&
|
||||
Objects.equals(version, that.version) &&
|
||||
Objects.equals(trackScores, that.trackScores) &&
|
||||
Objects.equals(storedFieldNames, that.storedFieldNames) &&
|
||||
Objects.equals(storedFieldsContext, that.storedFieldsContext) &&
|
||||
Objects.equals(docValueFields, that.docValueFields) &&
|
||||
Objects.equals(scriptFields, that.scriptFields) &&
|
||||
Objects.equals(fetchSourceContext, that.fetchSourceContext) &&
|
||||
|
@ -705,7 +695,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
|||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, storedFieldNames,
|
||||
return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, storedFieldsContext,
|
||||
docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, childInnerHits);
|
||||
}
|
||||
|
||||
|
|
|
@ -24,7 +24,6 @@ import org.elasticsearch.action.search.SearchType;
|
|||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
|
@ -33,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
|
@ -42,13 +40,12 @@ import org.elasticsearch.rest.action.RestActions;
|
|||
import org.elasticsearch.rest.action.RestStatusToXContentListener;
|
||||
import org.elasticsearch.search.Scroll;
|
||||
import org.elasticsearch.search.SearchRequestParsers;
|
||||
import org.elasticsearch.search.aggregations.AggregatorParsers;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggesters;
|
||||
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -178,18 +175,11 @@ public class RestSearchAction extends BaseRestHandler {
|
|||
"if the field is not stored");
|
||||
}
|
||||
|
||||
String sField = request.param("stored_fields");
|
||||
if (sField != null) {
|
||||
if (!Strings.hasText(sField)) {
|
||||
searchSourceBuilder.noStoredFields();
|
||||
} else {
|
||||
String[] sFields = Strings.splitStringByCommaToArray(sField);
|
||||
if (sFields != null) {
|
||||
for (String field : sFields) {
|
||||
searchSourceBuilder.storedField(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
StoredFieldsContext storedFieldsContext =
|
||||
StoredFieldsContext.fromRestRequest(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), request);
|
||||
if (storedFieldsContext != null) {
|
||||
searchSourceBuilder.storedFields(storedFieldsContext);
|
||||
}
|
||||
String sDocValueFields = request.param("docvalue_fields");
|
||||
if (sDocValueFields == null) {
|
||||
|
|
|
@@ -67,8 +67,8 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.DefaultSearchContext;
@@ -729,9 +729,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
                throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
            }
        }
        if (source.storedFields() != null) {
            context.fieldNames().addAll(source.storedFields());
        }
        if (source.explain() != null) {
            context.explain(source.explain());
        }
@@ -823,6 +820,18 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
            }
            context.sliceBuilder(source.slice());
        }

        if (source.storedFields() != null) {
            if (source.storedFields().fetchFields() == false) {
                if (context.version()) {
                    throw new SearchContextException(context, "`stored_fields` cannot be disabled if version is requested");
                }
                if (context.sourceRequested()) {
                    throw new SearchContextException(context, "`stored_fields` cannot be disabled if _source is requested");
                }
            }
            context.storedFieldsContext(source.storedFields());
        }
    }

    /**

@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type;
|
|||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.sort.ScoreSortBuilder;
|
||||
|
@ -63,7 +64,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
private boolean trackScores = false;
|
||||
private List<SortBuilder<?>> sorts = null;
|
||||
private HighlightBuilder highlightBuilder;
|
||||
private List<String> fieldNames;
|
||||
private StoredFieldsContext storedFieldsContext;
|
||||
private List<String> fieldDataFields;
|
||||
private Set<ScriptField> scriptFields;
|
||||
private FetchSourceContext fetchSourceContext;
|
||||
|
@ -86,13 +87,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
fieldDataFields.add(in.readString());
|
||||
}
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
fieldNames = new ArrayList<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
fieldNames.add(in.readString());
|
||||
}
|
||||
}
|
||||
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
|
||||
from = in.readVInt();
|
||||
highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
|
||||
if (in.readBoolean()) {
|
||||
|
@ -126,14 +121,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
out.writeString(fieldName);
|
||||
}
|
||||
}
|
||||
boolean hasFieldNames = fieldNames != null;
|
||||
out.writeBoolean(hasFieldNames);
|
||||
if (hasFieldNames) {
|
||||
out.writeVInt(fieldNames.size());
|
||||
for (String fieldName : fieldNames) {
|
||||
out.writeString(fieldName);
|
||||
}
|
||||
}
|
||||
out.writeOptionalWriteable(storedFieldsContext);
|
||||
out.writeVInt(from);
|
||||
out.writeOptionalWriteable(highlightBuilder);
|
||||
boolean hasScriptFields = scriptFields != null;
|
||||
|
@ -355,47 +343,34 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
}
|
||||
|
||||
/**
|
||||
* Adds a field to load and return (note, it must be stored) as part of
|
||||
* the search request. If none are specified, the source of the document
|
||||
* will be return.
|
||||
* Adds a stored field to load and return (note, it must be stored) as part of the search request.
|
||||
* To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
|
||||
*/
|
||||
public TopHitsAggregationBuilder field(String field) {
|
||||
if (field == null) {
|
||||
throw new IllegalArgumentException("[field] must not be null: [" + name + "]");
|
||||
}
|
||||
if (fieldNames == null) {
|
||||
fieldNames = new ArrayList<>();
|
||||
}
|
||||
fieldNames.add(field);
|
||||
return this;
|
||||
public TopHitsAggregationBuilder storedField(String field) {
|
||||
return storedFields(Collections.singletonList(field));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the fields to load and return as part of the search request. If
|
||||
* none are specified, the source of the document will be returned.
|
||||
* Sets the stored fields to load and return as part of the search request.
|
||||
* To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
|
||||
*/
|
||||
public TopHitsAggregationBuilder fields(List<String> fields) {
|
||||
public TopHitsAggregationBuilder storedFields(List<String> fields) {
|
||||
if (fields == null) {
|
||||
throw new IllegalArgumentException("[fields] must not be null: [" + name + "]");
|
||||
}
|
||||
this.fieldNames = fields;
|
||||
if (storedFieldsContext == null) {
|
||||
storedFieldsContext = StoredFieldsContext.fromList(fields);
|
||||
} else {
|
||||
storedFieldsContext.addFieldNames(fields);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets no fields to be loaded, resulting in only id and type to be
|
||||
* returned per field.
|
||||
* Gets the stored fields context
|
||||
*/
|
||||
public TopHitsAggregationBuilder noFields() {
|
||||
this.fieldNames = Collections.emptyList();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the fields to load and return as part of the search request.
|
||||
*/
|
||||
public List<String> fields() {
|
||||
return fieldNames;
|
||||
public StoredFieldsContext storedFields() {
|
||||
return storedFieldsContext;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -552,8 +527,9 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
@Override
|
||||
protected TopHitsAggregatorFactory doBuild(AggregationContext context, AggregatorFactory<?> parent, Builder subfactoriesBuilder)
|
||||
throws IOException {
|
||||
return new TopHitsAggregatorFactory(name, type, from, size, explain, version, trackScores, sorts, highlightBuilder, fieldNames,
|
||||
fieldDataFields, scriptFields, fetchSourceContext, context, parent, subfactoriesBuilder, metaData);
|
||||
return new TopHitsAggregatorFactory(name, type, from, size, explain, version, trackScores, sorts, highlightBuilder,
|
||||
storedFieldsContext, fieldDataFields, scriptFields, fetchSourceContext, context,
|
||||
parent, subfactoriesBuilder, metaData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -566,16 +542,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
if (fetchSourceContext != null) {
|
||||
builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext);
|
||||
}
|
||||
if (fieldNames != null) {
|
||||
if (fieldNames.size() == 1) {
|
||||
builder.field(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), fieldNames.get(0));
|
||||
} else {
|
||||
builder.startArray(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName());
|
||||
for (String fieldName : fieldNames) {
|
||||
builder.value(fieldName);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if (storedFieldsContext != null) {
|
||||
storedFieldsContext.toXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), builder);
|
||||
}
|
||||
if (fieldDataFields != null) {
|
||||
builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
|
||||
|
@ -630,9 +598,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
|
||||
factory.fetchSource(FetchSourceContext.parse(context));
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) {
|
||||
List<String> fieldNames = new ArrayList<>();
|
||||
fieldNames.add(parser.text());
|
||||
factory.fields(fieldNames);
|
||||
factory.storedFieldsContext =
|
||||
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SORT_FIELD)) {
|
||||
factory.sort(parser.text());
|
||||
} else {
|
||||
|
@ -696,16 +663,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
|
||||
if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) {
|
||||
List<String> fieldNames = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
fieldNames.add(parser.text());
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING
|
||||
+ "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
factory.fields(fieldNames);
|
||||
factory.storedFieldsContext =
|
||||
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.DOCVALUE_FIELDS_FIELD)) {
|
||||
List<String> fieldDataFields = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
|
@ -736,8 +695,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(explain, fetchSourceContext, fieldDataFields, fieldNames, from, highlightBuilder, scriptFields, size, sorts,
|
||||
trackScores, version);
|
||||
return Objects.hash(explain, fetchSourceContext, fieldDataFields, storedFieldsContext, from, highlightBuilder,
|
||||
scriptFields, size, sorts, trackScores, version);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -746,7 +705,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
|
|||
return Objects.equals(explain, other.explain)
|
||||
&& Objects.equals(fetchSourceContext, other.fetchSourceContext)
|
||||
&& Objects.equals(fieldDataFields, other.fieldDataFields)
|
||||
&& Objects.equals(fieldNames, other.fieldNames)
|
||||
&& Objects.equals(storedFieldsContext, other.storedFieldsContext)
|
||||
&& Objects.equals(from, other.from)
|
||||
&& Objects.equals(highlightBuilder, other.highlightBuilder)
|
||||
&& Objects.equals(scriptFields, other.scriptFields)
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations.metrics.tophits;
|
||||
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
|
@ -29,9 +30,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
|||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
import org.elasticsearch.search.sort.SortAndFormats;
|
||||
|
@ -53,15 +54,16 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
|
|||
private final boolean trackScores;
|
||||
private final List<SortBuilder<?>> sorts;
|
||||
private final HighlightBuilder highlightBuilder;
|
||||
private final List<String> fieldNames;
|
||||
private final StoredFieldsContext storedFieldsContext;
|
||||
private final List<String> docValueFields;
|
||||
private final Set<ScriptField> scriptFields;
|
||||
private final FetchSourceContext fetchSourceContext;
|
||||
|
||||
public TopHitsAggregatorFactory(String name, Type type, int from, int size, boolean explain, boolean version, boolean trackScores,
|
||||
List<SortBuilder<?>> sorts, HighlightBuilder highlightBuilder, List<String> fieldNames, List<String> docValueFields,
|
||||
Set<ScriptField> scriptFields, FetchSourceContext fetchSourceContext, AggregationContext context, AggregatorFactory<?> parent,
|
||||
AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
|
||||
List<SortBuilder<?>> sorts, HighlightBuilder highlightBuilder, StoredFieldsContext storedFieldsContext,
|
||||
List<String> docValueFields, Set<ScriptField> scriptFields, FetchSourceContext fetchSourceContext,
|
||||
AggregationContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactories,
|
||||
Map<String, Object> metaData) throws IOException {
|
||||
super(name, type, context, parent, subFactories, metaData);
|
||||
this.from = from;
|
||||
this.size = size;
|
||||
|
@ -70,7 +72,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
|
|||
this.trackScores = trackScores;
|
||||
this.sorts = sorts;
|
||||
this.highlightBuilder = highlightBuilder;
|
||||
this.fieldNames = fieldNames;
|
||||
this.storedFieldsContext = storedFieldsContext;
|
||||
this.docValueFields = docValueFields;
|
||||
this.scriptFields = scriptFields;
|
||||
this.fetchSourceContext = fetchSourceContext;
|
||||
|
@ -92,8 +94,8 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
|
|||
subSearchContext.sort(optionalSort.get());
|
||||
}
|
||||
}
|
||||
if (fieldNames != null) {
|
||||
subSearchContext.fieldNames().addAll(fieldNames);
|
||||
if (storedFieldsContext != null) {
|
||||
subSearchContext.storedFieldsContext(storedFieldsContext);
|
||||
}
|
||||
if (docValueFields != null) {
|
||||
DocValueFieldsContext docValueFieldsContext = subSearchContext
|
||||
|
|
|
@ -44,6 +44,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder;
|
|||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.AggregatorParsers;
|
||||
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
@ -148,7 +149,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
private TimeValue timeout = null;
|
||||
private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER;
|
||||
|
||||
private List<String> storedFieldNames;
|
||||
private StoredFieldsContext storedFieldsContext;
|
||||
private List<String> docValueFields;
|
||||
private List<ScriptField> scriptFields;
|
||||
private FetchSourceContext fetchSourceContext;
|
||||
|
@ -184,7 +185,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
explain = in.readOptionalBoolean();
|
||||
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
|
||||
docValueFields = (List<String>) in.readGenericValue();
|
||||
storedFieldNames = (List<String>) in.readGenericValue();
|
||||
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
|
||||
from = in.readVInt();
|
||||
highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
|
||||
boolean hasIndexBoost = in.readBoolean();
|
||||
|
@ -244,7 +245,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
out.writeOptionalBoolean(explain);
|
||||
out.writeOptionalStreamable(fetchSourceContext);
|
||||
out.writeGenericValue(docValueFields);
|
||||
out.writeGenericValue(storedFieldNames);
|
||||
out.writeOptionalWriteable(storedFieldsContext);
|
||||
out.writeVInt(from);
|
||||
out.writeOptionalWriteable(highlightBuilder);
|
||||
boolean hasIndexBoost = indexBoost != null;
|
||||
|
@ -711,11 +712,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
* return.
|
||||
*/
|
||||
public SearchSourceBuilder storedField(String name) {
|
||||
if (storedFieldNames == null) {
|
||||
storedFieldNames = new ArrayList<>();
|
||||
}
|
||||
storedFieldNames.add(name);
|
||||
return this;
|
||||
return storedFields(Collections.singletonList(name));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -723,24 +720,27 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
* are specified, the source of the document will be returned.
|
||||
*/
|
||||
public SearchSourceBuilder storedFields(List<String> fields) {
|
||||
this.storedFieldNames = fields;
|
||||
if (storedFieldsContext == null) {
|
||||
storedFieldsContext = StoredFieldsContext.fromList(fields);
|
||||
} else {
|
||||
storedFieldsContext.addFieldNames(fields);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets no stored fields to be loaded, resulting in only id and type to be returned
|
||||
* per field.
|
||||
* Indicates how the stored fields should be fetched.
|
||||
*/
|
||||
public SearchSourceBuilder noStoredFields() {
|
||||
this.storedFieldNames = Collections.emptyList();
|
||||
public SearchSourceBuilder storedFields(StoredFieldsContext context) {
|
||||
storedFieldsContext = context;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the stored fields to load and return as part of the search request.
|
||||
* Gets the stored fields context.
|
||||
*/
|
||||
public List<String> storedFields() {
|
||||
return storedFieldNames;
|
||||
public StoredFieldsContext storedFields() {
|
||||
return storedFieldsContext;
|
||||
}
|
||||
|
||||
|
||||
|
@ -912,7 +912,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
rewrittenBuilder.ext = ext;
|
||||
rewrittenBuilder.fetchSourceContext = fetchSourceContext;
|
||||
rewrittenBuilder.docValueFields = docValueFields;
|
||||
rewrittenBuilder.storedFieldNames = storedFieldNames;
|
||||
rewrittenBuilder.storedFieldsContext = storedFieldsContext;
|
||||
rewrittenBuilder.from = from;
|
||||
rewrittenBuilder.highlightBuilder = highlightBuilder;
|
||||
rewrittenBuilder.indexBoost = indexBoost;
|
||||
|
@ -973,7 +973,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
} else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
|
||||
fetchSourceContext = FetchSourceContext.parse(context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) {
|
||||
storedField(parser.text());
|
||||
storedFieldsContext =
|
||||
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
|
||||
sort(parser.text());
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) {
|
||||
|
@ -1033,15 +1034,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) {
|
||||
storedFieldNames = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
storedFieldNames.add(parser.text());
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
|
||||
+ currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
storedFieldsContext = StoredFieldsContext.fromXContent(STORED_FIELDS_FIELD.getPreferredName(), context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, DOCVALUE_FIELDS_FIELD)) {
|
||||
docValueFields = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
|
@ -1141,16 +1134,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext);
|
||||
}
|
||||
|
||||
if (storedFieldNames != null) {
|
||||
if (storedFieldNames.size() == 1) {
|
||||
builder.field(STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0));
|
||||
} else {
|
||||
builder.startArray(STORED_FIELDS_FIELD.getPreferredName());
|
||||
for (String fieldName : storedFieldNames) {
|
||||
builder.value(fieldName);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
if (storedFieldsContext != null) {
|
||||
storedFieldsContext.toXContent(STORED_FIELDS_FIELD.getPreferredName(), builder);
|
||||
}
|
||||
|
||||
if (docValueFields != null) {
|
||||
|
@ -1349,7 +1334,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldNames, from,
|
||||
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from,
|
||||
highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
|
||||
size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile);
|
||||
}
|
||||
|
@ -1367,7 +1352,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
&& Objects.equals(explain, other.explain)
|
||||
&& Objects.equals(fetchSourceContext, other.fetchSourceContext)
|
||||
&& Objects.equals(docValueFields, other.docValueFields)
|
||||
&& Objects.equals(storedFieldNames, other.storedFieldNames)
|
||||
&& Objects.equals(storedFieldsContext, other.storedFieldsContext)
|
||||
&& Objects.equals(from, other.from)
|
||||
&& Objects.equals(highlightBuilder, other.highlightBuilder)
|
||||
&& Objects.equals(indexBoost, other.indexBoost)
|
||||
|
|
|
@@ -92,19 +92,22 @@ public class FetchPhase implements SearchPhase {

    @Override
    public void execute(SearchContext context) {
        FieldsVisitor fieldsVisitor;
        final FieldsVisitor fieldsVisitor;
        Set<String> fieldNames = null;
        List<String> fieldNamePatterns = null;
        if (!context.hasFieldNames()) {
        StoredFieldsContext storedFieldsContext = context.storedFieldsContext();

        if (storedFieldsContext == null) {
            // no fields specified, default to return source if no explicit indication
            if (!context.hasScriptFields() && !context.hasFetchSourceContext()) {
                context.fetchSourceContext(new FetchSourceContext(true));
            }
            fieldsVisitor = new FieldsVisitor(context.sourceRequested());
        } else if (context.fieldNames().isEmpty()) {
            fieldsVisitor = new FieldsVisitor(context.sourceRequested());
        } else if (storedFieldsContext.fetchFields() == false) {
            // disable stored fields entirely
            fieldsVisitor = null;
        } else {
            for (String fieldName : context.fieldNames()) {
            for (String fieldName : context.storedFieldsContext().fieldNames()) {
                if (fieldName.equals(SourceFieldMapper.NAME)) {
                    if (context.hasFetchSourceContext()) {
                        context.fetchSourceContext().fetchSource(true);
@@ -133,8 +136,13 @@ public class FetchPhase implements SearchPhase {
                }
            }
            boolean loadSource = context.sourceRequested();
            fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames,
                    fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, loadSource);
            if (fieldNames == null && fieldNamePatterns == null) {
                // empty list specified, default to disable _source if no explicit indication
                fieldsVisitor = new FieldsVisitor(loadSource);
            } else {
                fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames,
                        fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, loadSource);
            }
        }

        InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
@@ -182,6 +190,9 @@ public class FetchPhase implements SearchPhase {
    }

    private InternalSearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, LeafReaderContext subReaderContext) {
        if (fieldsVisitor == null) {
            return new InternalSearchHit(docId);
        }
        loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
        fieldsVisitor.postProcess(context.mapperService());

@@ -273,9 +284,9 @@ public class FetchPhase implements SearchPhase {

    private Map<String, SearchHitField> getSearchFields(SearchContext context, int nestedSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) {
        Map<String, SearchHitField> searchFields = null;
        if (context.hasFieldNames() && !context.fieldNames().isEmpty()) {
        if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) {
            FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames,
                    fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, false);
                fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, false);
            if (nestedFieldsVisitor != null) {
                loadStoredFields(context, subReaderContext, nestedFieldsVisitor, nestedSubDocId);
                nestedFieldsVisitor.postProcess(context.mapperService());

@@ -0,0 +1,195 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.fetch;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.RestRequest;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * Context used to fetch the {@code stored_fields}.
 */
public class StoredFieldsContext implements Writeable {
    public static final String _NONE_ = "_none_";

    private final List<String> fieldNames;
    private boolean fetchFields;

    private StoredFieldsContext(boolean fetchFields) {
        this.fetchFields = fetchFields;
        this.fieldNames = null;
    }

    private StoredFieldsContext(List<String> fieldNames) {
        Objects.requireNonNull(fieldNames, "fieldNames must not be null");
        this.fetchFields = true;
        this.fieldNames = new ArrayList<>(fieldNames);
    }

    public StoredFieldsContext(StoredFieldsContext other) {
        this.fetchFields = other.fetchFields();
        if (other.fieldNames() != null) {
            this.fieldNames = new ArrayList<>(other.fieldNames());
        } else {
            this.fieldNames = null;
        }
    }

    public StoredFieldsContext(StreamInput in) throws IOException {
        this.fetchFields = in.readBoolean();
        if (fetchFields) {
            this.fieldNames = (List<String>) in.readGenericValue();
        } else {
            this.fieldNames = null;
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeBoolean(fetchFields);
        if (fetchFields) {
            out.writeGenericValue(fieldNames);
        }
    }

    /**
     * Gets the field names to load and return as part of the search request.
     */
    public List<String> fieldNames() {
        return fieldNames;
    }

    /**
     * Adds the field names {@code fieldNames} to the list of fields to load.
     */
    public StoredFieldsContext addFieldNames(List<String> fieldNames) {
        if (fetchFields == false || fieldNames.contains(_NONE_)) {
            throw new IllegalArgumentException("cannot combine _none_ with other fields");
        }
        this.fieldNames.addAll(fieldNames);
        return this;
    }

    /**
     * Adds a field name {@code field} to the list of fields to load.
     */
    public StoredFieldsContext addFieldName(String field) {
        if (fetchFields == false || _NONE_.equals(field)) {
            throw new IllegalArgumentException("cannot combine _none_ with other fields");
        }
        this.fieldNames.add(field);
        return this;
    }

    /**
     * Returns true if the stored fields should be fetched, false otherwise.
     */
    public boolean fetchFields() {
        return fetchFields;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        StoredFieldsContext that = (StoredFieldsContext) o;

        if (fetchFields != that.fetchFields) return false;
        return fieldNames != null ? fieldNames.equals(that.fieldNames) : that.fieldNames == null;

    }

    @Override
    public int hashCode() {
        int result = fieldNames != null ? fieldNames.hashCode() : 0;
        result = 31 * result + (fetchFields ? 1 : 0);
        return result;
    }

    public void toXContent(String preferredName, XContentBuilder builder) throws IOException {
        if (fetchFields == false) {
            builder.field(preferredName, _NONE_);
        }
        if (fieldNames != null) {
            if (fieldNames.size() == 1) {
                builder.field(preferredName, fieldNames.get(0));
            } else {
                builder.startArray(preferredName);
                for (String fieldName : fieldNames) {
                    builder.value(fieldName);
                }
                builder.endArray();
            }
        }
    }

    public static StoredFieldsContext fromList(List<String> fieldNames) {
        if (fieldNames.size() == 1 && _NONE_.equals(fieldNames.get(0))) {
            return new StoredFieldsContext(false);
        }
        if (fieldNames.contains(_NONE_)) {
            throw new IllegalArgumentException("cannot combine _none_ with other fields");
        }
        return new StoredFieldsContext(fieldNames);
    }

    public static StoredFieldsContext fromXContent(String fieldName, QueryParseContext context) throws IOException {
        XContentParser parser = context.parser();
        XContentParser.Token token = parser.currentToken();

        if (token == XContentParser.Token.VALUE_STRING) {
            return fromList(Collections.singletonList(parser.text()));
        } else if (token == XContentParser.Token.START_ARRAY) {
            ArrayList<String> list = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                list.add(parser.text());
            }
            return fromList(list);
        } else {
            throw new ParsingException(parser.getTokenLocation(),
                "Expected [" + XContentParser.Token.VALUE_STRING + "] or ["
                    + XContentParser.Token.START_ARRAY + "] in [" + fieldName + "] but found [" + token + "]",
                parser.getTokenLocation());
        }
    }

    public static StoredFieldsContext fromRestRequest(String name, RestRequest request) {
        String sField = request.param(name);
        if (sField != null) {
            String[] sFields = Strings.splitStringByCommaToArray(sField);
            return fromList(Arrays.asList(sFields));
        }
        return null;
    }
}

@@ -38,6 +38,9 @@ public final class ParentFieldSubFetchPhase implements FetchSubPhase {

    @Override
    public void hitExecute(SearchContext context, HitContext hitContext) {
        if (context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false) {
            return ;
        }
        ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).parentFieldMapper();
        if (parentFieldMapper.active() == false) {
            return;

@@ -31,7 +31,8 @@ public final class VersionFetchSubPhase implements FetchSubPhase {

    @Override
    public void hitExecute(SearchContext context, HitContext hitContext) {
        if (context.version() == false) {
        if (context.version() == false ||
            (context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false)) {
            return;
        }
        long version = Versions.NOT_FOUND;

@ -50,6 +50,7 @@ import org.elasticsearch.index.mapper.TypeFieldMapper;
|
|||
import org.elasticsearch.index.query.AbstractQueryBuilder;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
|
@ -106,7 +107,7 @@ public class DefaultSearchContext extends SearchContext {
|
|||
private ScrollContext scrollContext;
|
||||
private boolean explain;
|
||||
private boolean version = false; // by default, we don't return versions
|
||||
private List<String> fieldNames;
|
||||
private StoredFieldsContext storedFields;
|
||||
private ScriptFieldsContext scriptFields;
|
||||
private FetchSourceContext fetchSourceContext;
|
||||
private int from = -1;
|
||||
|
@ -651,21 +652,29 @@ public class DefaultSearchContext extends SearchContext {
|
|||
}
|
||||
|
||||
@Override
|
||||
public boolean hasFieldNames() {
|
||||
return fieldNames != null;
|
||||
public boolean hasStoredFields() {
|
||||
return storedFields != null && storedFields.fieldNames() != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> fieldNames() {
|
||||
if (fieldNames == null) {
|
||||
fieldNames = new ArrayList<>();
|
||||
}
|
||||
return fieldNames;
|
||||
public boolean hasStoredFieldsContext() {
|
||||
return storedFields != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void emptyFieldNames() {
|
||||
this.fieldNames = Collections.emptyList();
|
||||
public StoredFieldsContext storedFieldsContext() {
|
||||
return storedFields;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
|
||||
this.storedFields = storedFieldsContext;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean storedFieldsRequested() {
|
||||
return storedFields == null || storedFields.fetchFields();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MapperService;
|
|||
import org.elasticsearch.index.mapper.ObjectMapper;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
|
@ -69,6 +70,31 @@ public abstract class FilteredSearchContext extends SearchContext {
|
|||
this.in = in;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasStoredFields() {
|
||||
return in.hasStoredFields();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasStoredFieldsContext() {
|
||||
return in.hasStoredFieldsContext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean storedFieldsRequested() {
|
||||
return in.storedFieldsRequested();
|
||||
}
|
||||
|
||||
@Override
|
||||
public StoredFieldsContext storedFieldsContext() {
|
||||
return in.storedFieldsContext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
|
||||
return in.storedFieldsContext(storedFieldsContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doClose() {
|
||||
in.doClose();
|
||||
|
@ -374,20 +400,6 @@ public abstract class FilteredSearchContext extends SearchContext {
|
|||
return in.size(size);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasFieldNames() {
|
||||
return in.hasFieldNames();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> fieldNames() {
|
||||
return in.fieldNames();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void emptyFieldNames() {
|
||||
in.emptyFieldNames();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean explain() {
|
||||
|
|
|
@ -100,9 +100,17 @@ public class InternalSearchHit implements SearchHit {
|
|||
|
||||
}
|
||||
|
||||
public InternalSearchHit(int docId) {
|
||||
this(docId, null, null, null);
|
||||
}
|
||||
|
||||
public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
|
||||
this.docId = docId;
|
||||
this.id = new Text(id);
|
||||
if (id != null) {
|
||||
this.id = new Text(id);
|
||||
} else {
|
||||
this.id = null;
|
||||
}
|
||||
this.type = type;
|
||||
this.fields = fields;
|
||||
}
|
||||
|
@ -168,7 +176,7 @@ public class InternalSearchHit implements SearchHit {
|
|||
|
||||
@Override
|
||||
public String id() {
|
||||
return id.string();
|
||||
return id != null ? id.string() : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -178,7 +186,7 @@ public class InternalSearchHit implements SearchHit {
|
|||
|
||||
@Override
|
||||
public String type() {
|
||||
return type.string();
|
||||
return type != null ? type.string() : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -444,8 +452,12 @@ public class InternalSearchHit implements SearchHit {
|
|||
if (shard != null) {
|
||||
builder.field(Fields._INDEX, shard.indexText());
|
||||
}
|
||||
builder.field(Fields._TYPE, type);
|
||||
builder.field(Fields._ID, id);
|
||||
if (type != null) {
|
||||
builder.field(Fields._TYPE, type);
|
||||
}
|
||||
if (id != null) {
|
||||
builder.field(Fields._ID, id);
|
||||
}
|
||||
}
|
||||
if (version != -1) {
|
||||
builder.field(Fields._VERSION, version);
|
||||
|
@ -555,8 +567,8 @@ public class InternalSearchHit implements SearchHit {
|
|||
|
||||
public void readFrom(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
|
||||
score = in.readFloat();
|
||||
id = in.readText();
|
||||
type = in.readText();
|
||||
id = in.readOptionalText();
|
||||
type = in.readOptionalText();
|
||||
nestedIdentity = in.readOptionalStreamable(InternalNestedIdentity::new);
|
||||
version = in.readLong();
|
||||
source = in.readBytesReference();
|
||||
|
@ -664,8 +676,8 @@ public class InternalSearchHit implements SearchHit {
|
|||
|
||||
public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context) throws IOException {
|
||||
out.writeFloat(score);
|
||||
out.writeText(id);
|
||||
out.writeText(type);
|
||||
out.writeOptionalText(id);
|
||||
out.writeOptionalText(type);
|
||||
out.writeOptionalStreamable(nestedIdentity);
|
||||
out.writeLong(version);
|
||||
out.writeBytesReference(source);
|
||||
|
|
|
@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;

@ -68,7 +69,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;

/**

@ -291,11 +291,18 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
    public abstract SearchContext size(int size);

    public abstract boolean hasFieldNames();
    public abstract boolean hasStoredFields();

    public abstract List<String> fieldNames();
    public abstract boolean hasStoredFieldsContext();

    public abstract void emptyFieldNames();
    /**
     * A shortcut function to see whether there is a storedFieldsContext and it says the fields are requested.
     */
    public abstract boolean storedFieldsRequested();

    public abstract StoredFieldsContext storedFieldsContext();

    public abstract SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext);

    public abstract boolean explain();

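The three new accessors answer different questions: whether a `stored_fields` section exists at all, whether it lists concrete field names, and whether stored field loading is enabled. A hedged sketch of how a fetch-phase consumer might branch on them; the helper class, package paths, and messages are illustrative, not taken from the commit:

````
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.internal.SearchContext;

final class StoredFieldsInspection {
    // Illustrative helper: decide how a fetch phase could treat stored fields.
    static String describe(SearchContext context) {
        if (context.hasStoredFieldsContext() == false) {
            return "no stored_fields given: fall back to default metadata loading";
        }
        if (context.storedFieldsRequested() == false) {
            return "_none_ was used: skip the stored fields visitor entirely";
        }
        StoredFieldsContext storedFields = context.storedFieldsContext();
        return "load the requested fields: " + storedFields.fieldNames();
    }
}
````
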
@ -22,6 +22,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.Counter;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

@ -33,8 +34,6 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.suggest.SuggestionSearchContext;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**

@ -58,7 +57,7 @@ public class SubSearchContext extends FilteredSearchContext {
    private int docsIdsToLoadFrom;
    private int docsIdsToLoadSize;

    private List<String> fieldNames;
    private StoredFieldsContext storedFields;
    private ScriptFieldsContext scriptFields;
    private FetchSourceContext fetchSourceContext;
    private SearchContextHighlight highlight;

@ -239,21 +238,29 @@ public class SubSearchContext extends FilteredSearchContext {
    }

    @Override
    public boolean hasFieldNames() {
        return fieldNames != null;
    public boolean hasStoredFields() {
        return storedFields != null && storedFields.fieldNames() != null;
    }

    @Override
    public List<String> fieldNames() {
        if (fieldNames == null) {
            fieldNames = new ArrayList<>();
        }
        return fieldNames;
    public boolean hasStoredFieldsContext() {
        return storedFields != null;
    }

    @Override
    public void emptyFieldNames() {
        this.fieldNames = Collections.emptyList();
    public boolean storedFieldsRequested() {
        return storedFields != null && storedFields.fetchFields();
    }

    @Override
    public StoredFieldsContext storedFieldsContext() {
        return storedFields;
    }

    @Override
    public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
        this.storedFields = storedFieldsContext;
        return this;
    }

    @Override

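For sub contexts such as inner hits and `top_hits`, the stored-fields state is now a single `StoredFieldsContext`, and its `fetchFields()` flag is what carries the `_none_` decision. A sketch of wiring one in, assuming a `fromList(List<String>)` factory that this hunk does not show (only the consuming side is visible here); the helper class and field names are made up:

````
import java.util.Arrays;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.internal.SubSearchContext;

final class StoredFieldsWiring {
    // Illustrative only: attach an explicit stored-fields request to a sub context.
    static void requestTitleAndDate(SubSearchContext context) {
        // fromList(...) is an assumed factory; the diff only shows the context being consumed.
        StoredFieldsContext storedFields = StoredFieldsContext.fromList(Arrays.asList("title", "date"));
        context.storedFieldsContext(storedFields);
        // Explicit names: hasStoredFieldsContext(), hasStoredFields() and
        // storedFieldsRequested() are all true.
        // A "_none_" request parses into a context whose fetchFields() is false,
        // so storedFieldsRequested() turns stored-field loading off entirely.
    }
}
````
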
@ -18,20 +18,6 @@
 */
package org.elasticsearch.index.query;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import static java.util.Collections.emptyList;

import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.BytesStreamOutput;

@ -60,6 +46,21 @@ import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;

public class InnerHitBuilderTests extends ESTestCase {

    private static final int NUMBER_OF_TESTBUILDERS = 20;

@ -221,7 +222,9 @@ public class InnerHitBuilderTests extends ESTestCase {
        innerHits.setExplain(randomBoolean());
        innerHits.setVersion(randomBoolean());
        innerHits.setTrackScores(randomBoolean());
        innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)));
        if (randomBoolean()) {
            innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)));
        }
        innerHits.setDocValueFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)));
        // Random script fields deduped on their field name.
        Map<String, SearchSourceBuilder.ScriptField> scriptFields = new HashMap<>();

@ -344,12 +347,13 @@ public class InnerHitBuilderTests extends ESTestCase {
                        HighlightBuilderTests::randomHighlighterBuilder));
                break;
            case 11:
                if (instance.getStoredFieldNames() == null || randomBoolean()) {
                    instance.setStoredFieldNames(randomValueOtherThan(instance.getStoredFieldNames(), () -> {
                        return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16));
                    }));
                if (instance.getStoredFieldsContext() == null || randomBoolean()) {
                    List<String> previous = instance.getStoredFieldsContext() == null ?
                        Collections.emptyList() : instance.getStoredFieldsContext().fieldNames();
                    instance.setStoredFieldNames(randomValueOtherThan(previous,
                        () -> randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))));
                } else {
                    instance.getStoredFieldNames().add(randomAsciiOfLengthBetween(1, 16));
                    instance.getStoredFieldsContext().addFieldName(randomAsciiOfLengthBetween(1, 16));
                }
                break;
            default:

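The randomized test now goes through `getStoredFieldsContext()` on `InnerHitBuilder`. A small hedged sketch of the builder-facing calls, using only the setters that appear in this diff; whether inner hits accept `_none_` exactly like the top-level request is an assumption here:

````
import java.util.Collections;
import org.elasticsearch.index.query.InnerHitBuilder;

final class InnerHitsStoredFieldsExample {
    // Illustrative only: configure stored fields on an inner hits definition.
    static void configure(InnerHitBuilder innerHits) {
        // Explicit stored fields, as before:
        innerHits.setStoredFieldNames(Collections.singletonList("title"));
        // Disabling stored (and metadata) fields is expected to go through the
        // same setter with the special "_none_" name, mirroring the search body:
        innerHits.setStoredFieldNames(Collections.singletonList("_none_"));
    }
}
````
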
@ -22,7 +22,6 @@ package org.elasticsearch.recovery;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.procedures.IntProcedure;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;

@ -219,7 +218,7 @@ public class RelocationIT extends ESIntegTestCase {
        for (int i = 0; i < 10; i++) {
            try {
                logger.info("--> START search test round {}", i + 1);
                SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).setNoStoredFields().execute().actionGet().getHits();
                SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits();
                ranOnce = true;
                if (hits.totalHits() != indexer.totalIndexedDocs()) {
                    int[] hitIds = new int[(int) indexer.totalIndexedDocs()];

@ -580,7 +580,7 @@ public class TopHitsIT extends ESIntegTestCase {
                topHits("hits").size(1)
                    .highlighter(new HighlightBuilder().field("text"))
                    .explain(true)
                    .field("text")
                    .storedField("text")
                    .fieldDataField("field1")
                    .scriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()))
                    .fetchSource("text", null)

@ -956,4 +956,41 @@ public class TopHitsIT extends ESIntegTestCase {
            .get();
        assertNoFailures(response);
    }

    public void testNoStoredFields() throws Exception {
        SearchResponse response = client()
            .prepareSearch("idx")
            .setTypes("type")
            .addAggregation(terms("terms")
                .executionHint(randomExecutionHint())
                .field(TERMS_AGGS_FIELD)
                .subAggregation(
                    topHits("hits").storedField("_none_")
                )
            )
            .get();

        assertSearchResponse(response);

        Terms terms = response.getAggregations().get("terms");
        assertThat(terms, notNullValue());
        assertThat(terms.getName(), equalTo("terms"));
        assertThat(terms.getBuckets().size(), equalTo(5));

        for (int i = 0; i < 5; i++) {
            Terms.Bucket bucket = terms.getBucketByKey("val" + i);
            assertThat(bucket, notNullValue());
            assertThat(key(bucket), equalTo("val" + i));
            assertThat(bucket.getDocCount(), equalTo(10L));
            TopHits topHits = bucket.getAggregations().get("hits");
            SearchHits hits = topHits.getHits();
            assertThat(hits.totalHits(), equalTo(10L));
            assertThat(hits.getHits().length, equalTo(3));
            for (SearchHit hit : hits) {
                assertThat(hit.source(), nullValue());
                assertThat(hit.id(), nullValue());
                assertThat(hit.type(), nullValue());
            }
        }
    }
}

@ -34,6 +34,7 @@ import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.Matchers.containsString;

@ -58,13 +59,25 @@ public class TopHitsTests extends BaseAggregationTestCase<TopHitsAggregationBuil
        if (randomBoolean()) {
            factory.trackScores(randomBoolean());
        }
        if (randomBoolean()) {
            int fieldsSize = randomInt(25);
            List<String> fields = new ArrayList<>(fieldsSize);
            for (int i = 0; i < fieldsSize; i++) {
                fields.add(randomAsciiOfLengthBetween(5, 50));
            }
            factory.fields(fields);
        switch (randomInt(3)) {
            case 0:
                break;
            case 1:
                factory.storedField("_none_");
                break;
            case 2:
                factory.storedFields(Collections.emptyList());
                break;
            case 3:
                int fieldsSize = randomInt(25);
                List<String> fields = new ArrayList<>(fieldsSize);
                for (int i = 0; i < fieldsSize; i++) {
                    fields.add(randomAsciiOfLengthBetween(5, 50));
                }
                factory.storedFields(fields);
                break;
            default:
                throw new IllegalStateException();
        }
        if (randomBoolean()) {
            int fieldDataFieldsSize = randomInt(25);

@ -53,14 +53,12 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests;
import org.elasticsearch.search.rescore.QueryRescoreBuilderTests;

@ -73,7 +71,6 @@ import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilderTests;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;

@ -213,20 +210,26 @@ public class SearchSourceBuilderTests extends ESTestCase {
        // if (randomBoolean()) {
        //     builder.defaultRescoreWindowSize(randomIntBetween(1, 100));
        // }
        if (randomBoolean()) {
            int fieldsSize = randomInt(25);
            List<String> fields = new ArrayList<>(fieldsSize);
            for (int i = 0; i < fieldsSize; i++) {
                fields.add(randomAsciiOfLengthBetween(5, 50));
            }
            builder.storedFields(fields);
        }
        if (randomBoolean()) {
            int fieldDataFieldsSize = randomInt(25);
            for (int i = 0; i < fieldDataFieldsSize; i++) {
                builder.docValueField(randomAsciiOfLengthBetween(5, 50));
            }

        switch(randomInt(2)) {
            case 0:
                builder.storedFields();
                break;
            case 1:
                builder.storedField("_none_");
                break;
            case 2:
                int fieldsSize = randomInt(25);
                List<String> fields = new ArrayList<>(fieldsSize);
                for (int i = 0; i < fieldsSize; i++) {
                    fields.add(randomAsciiOfLengthBetween(5, 50));
                }
                builder.storedFields(fields);
                break;
            default:
                throw new IllegalStateException();
        }

        if (randomBoolean()) {
            int scriptFieldsSize = randomInt(25);
            for (int i = 0; i < scriptFieldsSize; i++) {

@ -545,14 +548,14 @@ public class SearchSourceBuilderTests extends ESTestCase {

    public void testAggsParsing() throws IOException {
        {
            String restContent = "{\n" + " " +
                "\"aggs\": {" +
                " \"test_agg\": {\n" +
                " " + "\"terms\" : {\n" +
                " \"field\": \"foo\"\n" +
                " }\n" +
                " }\n" +
                " }\n" +
            String restContent = "{\n" + " " +
                "\"aggs\": {" +
                " \"test_agg\": {\n" +
                " " + "\"terms\" : {\n" +
                " \"field\": \"foo\"\n" +
                " }\n" +
                " }\n" +
                " }\n" +
                "}\n";
            try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),

@ -561,14 +564,14 @@ public class SearchSourceBuilderTests extends ESTestCase {
            }
        }
        {
            String restContent = "{\n" +
                " \"aggregations\": {" +
                " \"test_agg\": {\n" +
                " \"terms\" : {\n" +
                " \"field\": \"foo\"\n" +
                " }\n" +
                " }\n" +
                " }\n" +
            String restContent = "{\n" +
                " \"aggregations\": {" +
                " \"test_agg\": {\n" +
                " \"terms\" : {\n" +
                " \"field\": \"foo\"\n" +
                " }\n" +
                " }\n" +
                " }\n" +
                "}\n";
            try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),

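Outside the randomized test, the `SearchSourceBuilder` surface exercised here is small: `storedFields(List)` for explicit names, `storedFields()` for the default, and `storedField("_none_")` to disable loading. A minimal sketch; the query and field values are placeholders:

````
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

final class DisabledStoredFieldsSource {
    // Minimal sketch of the builder calls exercised above.
    static SearchSourceBuilder build() {
        SearchSourceBuilder source = new SearchSourceBuilder();
        source.query(QueryBuilders.termQuery("user", "kimchy"));
        // Turns off stored and metadata field loading; serializes to
        // { "stored_fields": "_none_", "query": { "term": { "user": "kimchy" } } }
        source.storedField("_none_");
        return source;
    }
}
````
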
@ -0,0 +1,122 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.source;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchContextException;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;

public class MetadataFetchingIT extends ESIntegTestCase {
    public void testSimple() {
        assertAcked(prepareCreate("test"));
        ensureGreen();

        client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet();
        refresh();

        SearchResponse response = client()
            .prepareSearch("test")
            .storedFields("_none_")
            .setFetchSource(false)
            .get();
        assertThat(response.getHits().getAt(0).getId(), nullValue());
        assertThat(response.getHits().getAt(0).getType(), nullValue());
        assertThat(response.getHits().getAt(0).sourceAsString(), nullValue());

        response = client()
            .prepareSearch("test")
            .storedFields("_none_")
            .get();
        assertThat(response.getHits().getAt(0).getId(), nullValue());
        assertThat(response.getHits().getAt(0).getType(), nullValue());
        assertThat(response.getHits().getAt(0).sourceAsString(), nullValue());
    }

    public void testWithRouting() {
        assertAcked(prepareCreate("test"));
        ensureGreen();

        client().prepareIndex("test", "type1", "1").setSource("field", "value").setRouting("toto").execute().actionGet();
        refresh();

        SearchResponse response = client()
            .prepareSearch("test")
            .storedFields("_none_")
            .setFetchSource(false)
            .get();
        assertThat(response.getHits().getAt(0).getId(), nullValue());
        assertThat(response.getHits().getAt(0).getType(), nullValue());
        assertThat(response.getHits().getAt(0).field("_routing"), nullValue());
        assertThat(response.getHits().getAt(0).sourceAsString(), nullValue());

        response = client()
            .prepareSearch("test")
            .storedFields("_none_")
            .get();
        assertThat(response.getHits().getAt(0).getId(), nullValue());
        assertThat(response.getHits().getAt(0).getType(), nullValue());
        assertThat(response.getHits().getAt(0).sourceAsString(), nullValue());
    }

    public void testInvalid() {
        assertAcked(prepareCreate("test"));
        ensureGreen();

        index("test", "type1", "1", "field", "value");
        refresh();

        {
            SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class,
                () -> client().prepareSearch("test").setFetchSource(true).storedFields("_none_").get());
            Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchContextException.class);
            assertNotNull(rootCause);
            assertThat(rootCause.getClass(), equalTo(SearchContextException.class));
            assertThat(rootCause.getMessage(),
                equalTo("`stored_fields` cannot be disabled if _source is requested"));
        }
        {
            SearchPhaseExecutionException exc = expectThrows(SearchPhaseExecutionException.class,
                () -> client().prepareSearch("test").storedFields("_none_").setVersion(true).get());
            Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchContextException.class);
            assertNotNull(rootCause);
            assertThat(rootCause.getClass(), equalTo(SearchContextException.class));
            assertThat(rootCause.getMessage(),
                equalTo("`stored_fields` cannot be disabled if version is requested"));
        }
        {
            IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
                () -> client().prepareSearch("test").storedFields("_none_", "field1").setVersion(true).get());
            assertThat(exc.getMessage(),
                equalTo("cannot combine _none_ with other fields"));
        }
        {
            IllegalArgumentException exc = expectThrows(IllegalArgumentException.class,
                () -> client().prepareSearch("test").storedFields("_none_").storedFields("field1").setVersion(true).get());
            assertThat(exc.getMessage(),
                equalTo("cannot combine _none_ with other fields"));
        }
    }
}

@ -21,6 +21,7 @@ The top_hits aggregation returns regular search hits, because of this many per h
* <<search-request-explain,Explain>>
* <<search-request-named-queries-and-filters,Named filters and queries>>
* <<search-request-source-filtering,Source filtering>>
* <<search-request-stored-fields,Stored fields>>
* <<search-request-script-fields,Script fields>>
* <<search-request-docvalue-fields,Doc value fields>>
* <<search-request-version,Include versions>>

@ -82,7 +82,7 @@ The `fingerprint` analyzer accepts the following parameters:
`stopwords`::

    A pre-defined stop words list like `_english_` or an array containing a
    list of stop words. Defaults to `_none_`.
    list of stop words. Defaults to `\_none_`.

`stopwords_path`::

@ -162,7 +162,7 @@ The `pattern` analyzer accepts the following parameters:
`stopwords`::

    A pre-defined stop words list like `_english_` or an array containing a
    list of stop words. Defaults to `_none_`.
    list of stop words. Defaults to `\_none_`.

`stopwords_path`::

@ -145,7 +145,7 @@ The `standard` analyzer accepts the following parameters:
`stopwords`::

    A pre-defined stop words list like `_english_` or an array containing a
    list of stop words. Defaults to `_none_`.
    list of stop words. Defaults to `\_none_`.

`stopwords_path`::

@ -76,4 +76,4 @@ Elasticsearch provides the following predefined list of languages:
`_portuguese_`, `_romanian_`, `_russian_`, `_sorani_`, `_spanish_`,
`_swedish_`, `_thai_`, `_turkish_`.

For the empty stopwords list (to disable stopwords) use: `_none_`.
For the empty stopwords list (to disable stopwords) use: `\_none_`.

@ -53,3 +53,21 @@ Script fields can also be automatically detected and used as fields, so
things like `_source.obj1.field1` can be used, though not recommended, as
`obj1.field1` will work as well.

==== Disable stored fields entirely

To disable the stored fields (and metadata fields) entirely use: `\_none_`:

[source,js]
--------------------------------------------------
GET /_search
{
    "stored_fields": "_none_",
    "query" : {
        "term" : { "user" : "kimchy" }
    }
}
--------------------------------------------------
// CONSOLE

NOTE: <<search-request-source-filtering,`_source`>> and <<search-request-version, `version`>> parameters cannot be activated if `_none_` is used.

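For reference, the same request expressed with the Java `SearchRequestBuilder` API touched earlier in this change; a minimal sketch in which the `Client` instance and the helper class are assumed:

````
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

final class DisableStoredFieldsExample {
    // Mirrors the REST example above: term query with stored fields disabled.
    static SearchResponse run(Client client) {
        return client.prepareSearch()                 // all indices, like GET /_search
                .setQuery(QueryBuilders.termQuery("user", "kimchy"))
                .storedFields("_none_")               // no stored fields, no _id/_type metadata
                .get();
    }
}
````
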
@ -89,10 +89,10 @@ final class RemoteRequestBuilders {
            params.put("sorts", sorts.toString());
        }
    }
    if (searchRequest.source().storedFields() != null && false == searchRequest.source().storedFields().isEmpty()) {
        StringBuilder fields = new StringBuilder(searchRequest.source().storedFields().get(0));
        for (int i = 1; i < searchRequest.source().storedFields().size(); i++) {
            fields.append(',').append(searchRequest.source().storedFields().get(i));
    if (searchRequest.source().storedFields() != null && false == searchRequest.source().storedFields().fieldNames().isEmpty()) {
        StringBuilder fields = new StringBuilder(searchRequest.source().storedFields().fieldNames().get(0));
        for (int i = 1; i < searchRequest.source().storedFields().fieldNames().size(); i++) {
            fields.append(',').append(searchRequest.source().storedFields().fieldNames().get(i));
        }
        String storedFieldsParamName = remoteVersion.before(Version.V_5_0_0_alpha4) ? "fields" : "stored_fields";
        params.put(storedFieldsParamName, fields.toString());

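The `StringBuilder` loop above only joins the remote request's stored field names with commas for the URL parameter. As a side note, an equivalent helper (a sketch, not part of the commit) makes the intent explicit:

````
import java.util.List;

final class FieldNameJoiner {
    // Same result as the StringBuilder loop: ["_id", "title"] -> "_id,title".
    static String joinFieldNames(List<String> fieldNames) {
        return String.join(",", fieldNames);
    }
}
````
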
@ -0,0 +1,44 @@
setup:
  - do:
      indices.create:
        index: test
  - do:
      index:
        index: test
        type: test
        id: 1
        body: { foo: bar }
  - do:
      indices.refresh:
        index: [test]

---
"Stored fields":
  - do:
      search:
        index: test

  - is_true: hits.hits.0._id
  - is_true: hits.hits.0._type
  - is_true: hits.hits.0._source

  - do:
      search:
        index: test
        body:
          stored_fields: []

  - is_true: hits.hits.0._id
  - is_true: hits.hits.0._type
  - is_false: hits.hits.0._source

  - do:
      search:
        index: test
        body:
          stored_fields: "_none_"

  - is_false: hits.hits.0._id
  - is_false: hits.hits.0._type
  - is_false: hits.hits.0._source

@ -18,10 +18,6 @@
 */
package org.elasticsearch.test;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;

@ -40,6 +36,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;

@ -65,6 +62,10 @@ import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TestSearchContext extends SearchContext {

    final BigArrays bigArrays;

@ -430,17 +431,28 @@ public class TestSearchContext extends SearchContext {
    }

    @Override
    public boolean hasFieldNames() {
    public boolean hasStoredFields() {
        return false;
    }

    @Override
    public List<String> fieldNames() {
    public boolean hasStoredFieldsContext() {
        return false;
    }

    @Override
    public boolean storedFieldsRequested() {
        return false;
    }

    @Override
    public StoredFieldsContext storedFieldsContext() {
        return null;
    }

    @Override
    public void emptyFieldNames() {
    public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
        return null;
    }

    @Override