Refactor how to determine if a field is metafield (#57378) (#57771)

Previously, to determine whether a field is a meta-field, the static method
MapperService.isMetadataField was used. That method relied on an outdated,
hard-coded list of meta-fields.

This PR replaces the static method with an instance method that is also
aware of the meta-fields registered by installed plugins.

Related #38373, #41656
Closes #24422
Mayya Sharipova authored 2020-06-08 09:16:18 -04:00, committed by GitHub
parent 663702e609
commit 70e63a365a
32 changed files with 232 additions and 295 deletions
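
As background for the diffs below, here is a minimal, runnable sketch of the idea behind the change. The class and method names are illustrative only, not the actual Elasticsearch types: a hard-coded static set of meta-fields cannot know about fields contributed by plugins, while a registry assembled from the installed plugins can.

import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustrative sketch only; these are not the real Elasticsearch classes.
public class MetaFieldCheckSketch {

    // Old approach: a fixed, hard-coded list that goes stale as mappers evolve
    // and never sees fields contributed by plugins.
    static final Set<String> STATIC_META_FIELDS = Set.of("_id", "_index", "_routing", "_type");

    static boolean isMetadataFieldStaticStyle(String field) {
        return STATIC_META_FIELDS.contains(field);
    }

    // New approach: an instance-level registry assembled from the built-in
    // meta-fields plus the meta-fields of every installed mapper plugin.
    static final class MapperRegistrySketch {
        private final Set<String> metaFields = new HashSet<>();

        MapperRegistrySketch(Set<String> builtIns, List<Set<String>> pluginContributions) {
            metaFields.addAll(builtIns);
            pluginContributions.forEach(metaFields::addAll);
        }

        boolean isMetadataField(String field) {
            return metaFields.contains(field);
        }
    }

    public static void main(String[] args) {
        // "_feature" stands in for a plugin-provided meta-field, such as the one
        // contributed by the mapper-extras module.
        MapperRegistrySketch registry =
            new MapperRegistrySketch(STATIC_META_FIELDS, List.of(Set.of("_feature")));

        System.out.println(isMetadataFieldStaticStyle("_feature")); // false: the static list misses it
        System.out.println(registry.isMetadataField("_feature"));   // true: the plugin-aware lookup finds it
    }
}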


@@ -20,8 +20,10 @@
 package org.elasticsearch.index.mapper;

 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -44,7 +46,7 @@ public class RankFeatureMetaFieldMapperTests extends ESSingleNodeTestCase {
     protected Collection<Class<? extends Plugin>> getPlugins() {
         return pluginList(MapperExtrasPlugin.class);
     }

     public void testBasics() throws Exception {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("field").field("type", "rank_feature").endObject().endObject()
@@ -55,4 +57,18 @@ public class RankFeatureMetaFieldMapperTests extends ESSingleNodeTestCase {
         assertEquals(mapping, mapper.mappingSource().toString());
         assertNotNull(mapper.metadataMapper(RankFeatureMetaFieldMapper.class));
     }
+
+    /**
+     * Check that meta-fields are picked from plugins (in this case MapperExtrasPlugin),
+     * and parsing of a document fails if the document contains these meta-fields.
+     */
+    public void testDocumentParsingFailsOnMetaField() throws Exception {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject());
+        DocumentMapper mapper = parser.parse("_doc", new CompressedXContent(mapping));
+        String rfMetaField = RankFeatureMetaFieldMapper.CONTENT_TYPE;
+        BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
+        MapperParsingException e = expectThrows(MapperParsingException.class, () ->
+            mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)));
+        assertTrue(e.getMessage().contains("Field [" + rfMetaField + "] is a metadata field and cannot be added inside a document."));
+    }
 }


@@ -39,9 +39,7 @@ import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
@@ -104,13 +102,9 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
                 continue;
             }
-            Map<String, DocumentField> fields = hit.fieldsOrNull();
-            if (fields == null) {
-                fields = new HashMap<>();
-                hit.fields(fields);
-            }
             IntStream slots = convertTopDocsToSlots(topDocs, rootDocsBySlot);
-            hit.setField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
+            // _percolator_document_slot fields are document fields and should be under "fields" section in a hit
+            hit.setDocumentField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
         }
     }
 }


@@ -34,7 +34,6 @@ import java.util.Collections;
 import java.util.OptionalInt;

 import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
-import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;

 public class RatedSearchHitTests extends ESTestCase {
@@ -76,8 +75,7 @@ public class RatedSearchHitTests extends ESTestCase {
         RatedSearchHit testItem = randomRatedSearchHit();
         XContentType xContentType = randomFrom(XContentType.values());
         BytesReference originalBytes = toShuffledXContent(testItem, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
-        BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random());
-        try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
+        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
             RatedSearchHit parsedItem = RatedSearchHit.parse(parser);
             assertNotSame(testItem, parsedItem);
             assertEquals(testItem, parsedItem);


@@ -596,14 +596,12 @@ public class GetActionIT extends ESIntegTestCase {
         String field = "field1.field2.field3.field4";
         GetResponse getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get();
         assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getField(field).isMetadataField(), equalTo(false));
         assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
         assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
         assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));

         getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get();
         assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getField(field).isMetadataField(), equalTo(false));
         assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
         assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
         assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
@@ -631,7 +629,6 @@ public class GetActionIT extends ESIntegTestCase {
         getResponse = client().prepareGet("my-index", "my-type", "1").setStoredFields(field).get();
         assertThat(getResponse.isExists(), equalTo(true));
-        assertThat(getResponse.getField(field).isMetadataField(), equalTo(false));
         assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
         assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
         assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));


@@ -63,9 +63,7 @@ public class LegacyGetActionIT extends ESIntegTestCase {
                 .setStoredFields("field1")
                 .get();
             assertThat(getResponse.isExists(), equalTo(true));
-            assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false));
             assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
-            assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true));
             assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
         }
@@ -78,9 +76,7 @@ public class LegacyGetActionIT extends ESIntegTestCase {
                 .setRouting("1")
                 .get();
             assertThat(getResponse.isExists(), equalTo(true));
-            assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false));
             assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value"));
-            assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true));
             assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1"));
         }
     }


@@ -125,13 +125,10 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
                 return;
             }
             String field = fetchSubPhaseBuilder.getField();
-            if (hitContext.hit().fieldsOrNull() == null) {
-                hitContext.hit().fields(new HashMap<>());
-            }
             DocumentField hitField = hitContext.hit().getFields().get(NAME);
             if (hitField == null) {
                 hitField = new DocumentField(NAME, new ArrayList<>(1));
-                hitContext.hit().setField(NAME, hitField);
+                hitContext.hit().setDocumentField(NAME, hitField);
             }
             TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),
                 hitContext.hit().getType(), hitContext.hit().getId());


@@ -660,7 +660,6 @@ public class SearchFieldsIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue());
-        assertThat(searchResponse.getHits().getAt(0).field("_routing").isMetadataField(), equalTo(true));
         assertThat(searchResponse.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1"));
     }
@@ -736,7 +735,6 @@ public class SearchFieldsIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch("my-index").addStoredField(field).get();
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false));
         assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2));
         assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1"));
         assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2"));
@@ -1223,7 +1221,6 @@ public class SearchFieldsIT extends ESIntegTestCase {
         Map<String, DocumentField> fields = response.getHits().getAt(0).getFields();
         assertThat(fields.get("field1"), nullValue());
-        assertThat(fields.get("_routing").isMetadataField(), equalTo(true));
         assertThat(fields.get("_routing").getValue().toString(), equalTo("1"));
     }
 }


@@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.get.GetResult;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.search.SearchHit;

 import java.io.IOException;
@@ -87,13 +86,6 @@ public class DocumentField implements Writeable, ToXContentFragment, Iterable<Object> {
         return values;
     }

-    /**
-     * @return The field is a metadata field
-     */
-    public boolean isMetadataField() {
-        return MapperService.isMetadataField(name);
-    }
-
     @Override
     public Iterator<Object> iterator() {
         return values.iterator();


@@ -33,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.IgnoredFieldMapper;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.search.lookup.SourceLookup;
@@ -97,7 +98,8 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContentObject {
                 Map<String, DocumentField> fields = readFields(in);
                 documentFields = new HashMap<>();
                 metaFields = new HashMap<>();
-                splitFieldsByMetadata(fields, documentFields, metaFields);
+                fields.forEach((fieldName, docField) ->
+                    (MapperService.isMetadataFieldStatic(fieldName) ? metaFields : documentFields).put(fieldName, docField));
             }
         } else {
             metaFields = Collections.emptyMap();
@@ -363,7 +365,7 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContentObject {
                 } else if (FOUND.equals(currentFieldName)) {
                     found = parser.booleanValue();
                 } else {
                     metaFields.put(currentFieldName, new DocumentField(currentFieldName,
                         Collections.singletonList(parser.objectText())));
                 }
             } else if (token == XContentParser.Token.START_OBJECT) {
@@ -401,10 +403,10 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContentObject {
     }

     private Map<String, DocumentField> readFields(StreamInput in) throws IOException {
-        Map<String, DocumentField> fields = null;
+        Map<String, DocumentField> fields;
         int size = in.readVInt();
         if (size == 0) {
-            fields = new HashMap<>();
+            fields = emptyMap();
         } else {
             fields = new HashMap<>(size);
             for (int i = 0; i < size; i++) {
@@ -414,20 +416,6 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContentObject {
         }
         return fields;
     }

-    static void splitFieldsByMetadata(Map<String, DocumentField> fields, Map<String, DocumentField> outOther,
-                                      Map<String, DocumentField> outMetadata) {
-        if (fields == null) {
-            return;
-        }
-        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
-            if (fieldEntry.getValue().isMetadataField()) {
-                outMetadata.put(fieldEntry.getKey(), fieldEntry.getValue());
-            } else {
-                outOther.put(fieldEntry.getKey(), fieldEntry.getValue());
-            }
-        }
-    }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
@@ -446,11 +434,11 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContentObject {
                 writeFields(out, documentFields);
                 writeFields(out, metaFields);
             } else {
                 writeFields(out, this.getFields());
             }
         }
     }

     private void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException {
         if (fields == null) {
             out.writeVInt(0);


@@ -285,7 +285,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
             documentFields = new HashMap<>();
             metadataFields = new HashMap<>();
             for (Map.Entry<String, List<Object>> entry : fieldVisitor.fields().entrySet()) {
-                if (MapperService.isMetadataField(entry.getKey())) {
+                if (mapperService.isMetadataField(entry.getKey())) {
                     metadataFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
                 } else {
                     documentFields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));


@@ -404,7 +404,7 @@ final class DocumentParser {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
                 paths = splitAndValidatePath(currentFieldName);
-                if (MapperService.isMetadataField(context.path().pathAsText(currentFieldName))) {
+                if (context.mapperService().isMetadataField(context.path().pathAsText(currentFieldName))) {
                     throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside"
                         + " a document. Use the index API request parameters.");
                 } else if (containsDisabledObjectMapper(mapper, paths)) {


@@ -19,7 +19,6 @@
 package org.elasticsearch.index.mapper;

-import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.message.ParameterizedMessage;
@@ -56,6 +55,7 @@ import org.elasticsearch.index.analysis.TokenizerFactory;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.similarity.SimilarityService;
+import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.indices.InvalidTypeNameException;
 import org.elasticsearch.indices.mapper.MapperRegistry;
 import org.elasticsearch.search.suggest.completion.context.ContextMapping;
@@ -63,7 +63,6 @@ import org.elasticsearch.search.suggest.completion.context.ContextMapping;
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -121,14 +120,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT,
             Property.Dynamic, Property.IndexScope, Property.Deprecated);

-    //TODO this needs to be cleaned up: _timestamp and _ttl are not supported anymore, _field_names, _seq_no, _version and _source are
-    //also missing, not sure if on purpose. See IndicesModule#getMetadataMappers
-    private static final String[] SORTED_META_FIELDS = new String[]{
-        "_id", IgnoredFieldMapper.NAME, "_index", "_routing", "_size", "_timestamp", "_ttl", "_type"
-    };
-
-    private static final ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(SORTED_META_FIELDS);
-
     private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(MapperService.class));

     static final String DEFAULT_MAPPING_ERROR_MESSAGE = "[_default_] mappings are not allowed on new indices and should no " +
         "longer be used. See [https://www.elastic.co/guide/en/elasticsearch/reference/current/breaking-changes-7.0.html" +
@@ -146,6 +137,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     private boolean hasNested = false; // updated dynamically to true when a nested object is added

     private final DocumentMapperParser documentParser;
+    private final Version indexVersionCreated;
     private final MapperAnalyzerWrapper indexAnalyzer;
     private final MapperAnalyzerWrapper searchAnalyzer;
@@ -161,6 +153,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
                          SimilarityService similarityService, MapperRegistry mapperRegistry,
                          Supplier<QueryShardContext> queryShardContextSupplier, BooleanSupplier idFieldDataEnabled) {
         super(indexSettings);
+        this.indexVersionCreated = indexSettings.getIndexVersionCreated();
         this.indexAnalyzers = indexAnalyzers;
         this.fieldTypes = new FieldTypeLookup();
         this.documentParser = new DocumentMapperParser(indexSettings, this, xContentRegistry, similarityService, mapperRegistry,
@@ -812,14 +805,27 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     }

     /**
-     * @return Whether a field is a metadata field.
+     * @return Whether a field is a metadata field
+     * Deserialization of SearchHit objects sent from pre 7.8 nodes and GetResults objects sent from pre 7.3 nodes,
+     * uses this method to divide fields into meta and document fields.
+     * TODO: remove in v 9.0
+     * @deprecated Use an instance method isMetadataField instead
      */
-    public static boolean isMetadataField(String fieldName) {
-        return META_FIELDS.contains(fieldName);
+    @Deprecated
+    public static boolean isMetadataFieldStatic(String fieldName) {
+        if (IndicesModule.getBuiltInMetadataFields().contains(fieldName)) {
+            return true;
+        }
+        // if a node had Size Plugin installed, _size field should also be considered a meta-field
+        return fieldName.equals("_size");
     }

-    public static String[] getAllMetaFields() {
-        return Arrays.copyOf(SORTED_META_FIELDS, SORTED_META_FIELDS.length);
+    /**
+     * @return Whether a field is a metadata field.
+     * this method considers all mapper plugins
+     */
+    public boolean isMetadataField(String field) {
+        return mapperRegistry.isMetadataField(indexVersionCreated, field);
     }

     /**

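A note on the MapperService change above: the new instance method delegates to the MapperRegistry, which is assembled from the built-in metadata mappers plus those contributed by every installed MapperPlugin. The sketch below shows how a plugin surfaces a meta-field through that mechanism; it reuses the existing RankFeatureMetaFieldMapper from the mapper-extras module, but the plugin class itself is hypothetical and only illustrates the registration path, not code from this commit.

import java.util.Collections;
import java.util.Map;

import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.RankFeatureMetaFieldMapper;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;

// Hypothetical plugin, sketched for illustration: registering a metadata mapper
// is what makes the instance-level MapperService#isMetadataField report the
// field name as a meta-field, with no hard-coded list involved.
public class MetaFieldPluginSketch extends Plugin implements MapperPlugin {
    @Override
    public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
        // "_feature", the rank-feature meta-field contributed by mapper-extras
        return Collections.singletonMap(RankFeatureMetaFieldMapper.CONTENT_TYPE,
            new RankFeatureMetaFieldMapper.TypeParser());
    }
}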

@@ -150,6 +150,8 @@ public class IndicesModule extends AbstractModule {
     private static final Map<String, MetadataFieldMapper.TypeParser> builtInMetadataMappers = initBuiltInMetadataMappers();

+    private static Set<String> builtInMetadataFields = Collections.unmodifiableSet(builtInMetadataMappers.keySet());
+
     private static Map<String, MetadataFieldMapper.TypeParser> initBuiltInMetadataMappers() {
         Map<String, MetadataFieldMapper.TypeParser> builtInMetadataMappers;
         // Use a LinkedHashMap for metadataMappers because iteration order matters
@@ -206,7 +208,7 @@ public class IndicesModule extends AbstractModule {
      * Returns a set containing all of the builtin metadata fields
      */
     public static Set<String> getBuiltInMetadataFields() {
-        return builtInMetadataMappers.keySet();
+        return builtInMetadataFields;
     }

     private static Function<String, Predicate<String>> getFieldFilter(List<MapperPlugin> mapperPlugins) {


@@ -71,7 +71,6 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.parseFieldsValue;

 /**
  * A single search hit.
@@ -97,7 +96,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
     private BytesReference source;

     private Map<String, DocumentField> documentFields;
-    private Map<String, DocumentField> metaFields;
+    private final Map<String, DocumentField> metaFields;

     private Map<String, HighlightField> highlightFields = null;
@@ -139,15 +138,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
         }
         this.type = type;
         this.nestedIdentity = nestedIdentity;
-        this.documentFields = documentFields;
-        if (this.documentFields == null) {
-            this.documentFields = new HashMap<>();
-        }
-        this.metaFields = metaFields;
-        if (this.metaFields == null) {
-            this.metaFields = new HashMap<>();
-        }
+        this.documentFields = documentFields == null ? emptyMap() : documentFields;
+        this.metaFields = metaFields == null ? emptyMap() : metaFields;
     }

     public SearchHit(StreamInput in) throws IOException {
@@ -175,7 +167,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
             Map<String, DocumentField> fields = readFields(in);
             documentFields = new HashMap<>();
             metaFields = new HashMap<>();
-            SearchHit.splitFieldsByMetadata(fields, documentFields, metaFields);
+            fields.forEach((fieldName, docField) ->
+                (MapperService.isMetadataFieldStatic(fieldName) ? metaFields : documentFields).put(fieldName, docField));
         }

         int size = in.readVInt();
@@ -457,19 +450,21 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
      * The hit field matching the given field name.
      */
     public DocumentField field(String fieldName) {
-        return getFields().get(fieldName);
+        DocumentField result = documentFields.get(fieldName);
+        if (result != null) {
+            return result;
+        } else {
+            return metaFields.get(fieldName);
+        }
     }

     /*
      * Adds a new DocumentField to the map in case both parameters are not null.
      * */
-    public void setField(String fieldName, DocumentField field) {
+    public void setDocumentField(String fieldName, DocumentField field) {
         if (fieldName == null || field == null) return;
-        if (field.isMetadataField()) {
-            this.metaFields.put(fieldName, field);
-        } else {
-            this.documentFields.put(fieldName, field);
-        }
+        if (documentFields.size() == 0) this.documentFields = new HashMap<>();
+        this.documentFields.put(fieldName, field);
     }

     /**
@@ -477,27 +472,13 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
      * were required to be loaded.
      */
     public Map<String, DocumentField> getFields() {
-        Map<String, DocumentField> fields = new HashMap<>();
-        fields.putAll(metaFields);
-        fields.putAll(documentFields);
-        return fields;
-    }
-
-    // returns the fields without handling null cases
-    public Map<String, DocumentField> fieldsOrNull() {
-        return getFields();
-    }
-
-    public void fields(Map<String, DocumentField> fields) {
-        Objects.requireNonNull(fields);
-        this.metaFields = new HashMap<>();
-        this.documentFields = new HashMap<>();
-        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
-            if (fieldEntry.getValue().isMetadataField()) {
-                this.metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
-            } else {
-                this.documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
-            }
-        }
+        if (metaFields.size() > 0 || documentFields.size() > 0) {
+            final Map<String, DocumentField> fields = new HashMap<>();
+            fields.putAll(metaFields);
+            fields.putAll(documentFields);
+            return fields;
+        } else {
+            return emptyMap();
+        }
     }
@@ -597,22 +578,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
         this.innerHits = innerHits;
     }

-    public static void splitFieldsByMetadata(Map<String, DocumentField> fields,
-                                             Map<String, DocumentField> documentFields,
-                                             Map<String, DocumentField> metaFields) {
-        // documentFields and metaFields must be non-empty maps
-        if (fields == null) {
-            return;
-        }
-        for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
-            if (fieldEntry.getValue().isMetadataField()) {
-                metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
-            } else {
-                documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
-            }
-        }
-    }

     public static class Fields {
         static final String _INDEX = "_index";
         static final String _TYPE = "_type";
@@ -732,6 +697,18 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
         return builder;
     }

+    // All fields on the root level of the parsed SearhHit are interpreted as metadata fields
+    // public because we use it in a completion suggestion option
+    public static final ObjectParser.UnknownFieldConsumer<Map<String, Object>> unknownMetaFieldConsumer = (map, fieldName, fieldValue) -> {
+        Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(
+            METADATA_FIELDS, v -> new HashMap<String, DocumentField>());
+        if (fieldName.equals(IgnoredFieldMapper.NAME)) {
+            fieldMap.put(fieldName, new DocumentField(fieldName, (List<Object>) fieldValue));
+        } else {
+            fieldMap.put(fieldName, new DocumentField(fieldName, Collections.singletonList(fieldValue)));
+        }
+    };
+
     /**
      * This parser outputs a temporary map of the objects needed to create the
      * SearchHit instead of directly creating the SearchHit. The reason for this
@@ -742,7 +719,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
      * of the included search hit. The output of the map is used to create the
      * actual SearchHit instance via {@link #createFromMap(Map)}
      */
-    private static final ObjectParser<Map<String, Object>, Void> MAP_PARSER = new ObjectParser<>("innerHitParser", true, HashMap::new);
+    private static final ObjectParser<Map<String, Object>, Void> MAP_PARSER = new ObjectParser<>("innerHitParser",
+        unknownMetaFieldConsumer, HashMap::new);

     static {
         declareInnerHitsParseFields(MAP_PARSER);
@@ -753,7 +731,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
     }

     public static void declareInnerHitsParseFields(ObjectParser<Map<String, Object>, Void> parser) {
-        declareMetadataFields(parser);
         parser.declareString((map, value) -> map.put(Fields._TYPE, new Text(value)), new ParseField(Fields._TYPE));
         parser.declareString((map, value) -> map.put(Fields._INDEX, value), new ParseField(Fields._INDEX));
         parser.declareString((map, value) -> map.put(Fields._ID, value), new ParseField(Fields._ID));
@@ -852,40 +829,6 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<DocumentField> {
         }
     }

-    /**
-     * we need to declare parse fields for each metadata field, except for _ID, _INDEX and _TYPE which are
-     * handled individually. All other fields are parsed to an entry in the fields map
-     */
-    private static void declareMetadataFields(ObjectParser<Map<String, Object>, Void> parser) {
-        /* TODO: This method and its usage in declareInnerHitsParseFields() must be replaced by
-           calling an UnknownFieldConsumer. All fields on the root level of the parsed SearhHit
-           should be interpreted as metadata fields.
-         */
-        for (String metadatafield : MapperService.getAllMetaFields()) {
-            if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false
-                && metadatafield.equals(Fields._TYPE) == false) {
-                if (metadatafield.equals(IgnoredFieldMapper.NAME)) {
-                    parser.declareObjectArray((map, list) -> {
-                            @SuppressWarnings("unchecked")
-                            Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(METADATA_FIELDS,
-                                v -> new HashMap<String, DocumentField>());
-                            DocumentField field = new DocumentField(metadatafield, list);
-                            fieldMap.put(field.getName(), field);
-                        }, (p, c) -> parseFieldsValue(p),
-                        new ParseField(metadatafield));
-                } else {
-                    parser.declareField((map, field) -> {
-                            @SuppressWarnings("unchecked")
-                            Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(METADATA_FIELDS,
-                                v -> new HashMap<String, DocumentField>());
-                            fieldMap.put(field.getName(), field);
-                        }, (p, c) -> new DocumentField(metadatafield, Collections.singletonList(parseFieldsValue(p))),
-                        new ParseField(metadatafield), ValueType.VALUE);
-                }
-            }
-        }
-    }

     private static Map<String, DocumentField> parseFields(XContentParser parser) throws IOException {
         Map<String, DocumentField> fields = new HashMap<>();
         while (parser.nextToken() != XContentParser.Token.END_OBJECT) {


@@ -69,6 +69,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;

+import static java.util.Collections.emptyMap;

 /**
  * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified
@@ -211,15 +212,17 @@ public class FetchPhase implements SearchPhase {
         if (fieldsVisitor == null) {
             return new SearchHit(docId, null, typeText, null, null);
         }
-        Map<String, DocumentField> searchFields = getSearchFields(context, fieldsVisitor, subDocId,
-            storedToRequestedFields, subReaderContext);
-
-        Map<String, DocumentField> metaFields = new HashMap<>();
-        Map<String, DocumentField> documentFields = new HashMap<>();
-        SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields);
-
-        SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, documentFields, metaFields);
+        loadStoredFields(context.shardTarget(), subReaderContext, fieldsVisitor, subDocId);
+        fieldsVisitor.postProcess(context.mapperService());
+        SearchHit searchHit;
+        if (fieldsVisitor.fields().isEmpty() == false) {
+            Map<String, DocumentField> docFields = new HashMap<>();
+            Map<String, DocumentField> metaFields = new HashMap<>();
+            fillDocAndMetaFields(context, fieldsVisitor, storedToRequestedFields, docFields, metaFields);
+            searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, docFields, metaFields);
+        } else {
+            searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, emptyMap(), emptyMap());
+        }
         // Set _source if requested.
         SourceLookup sourceLookup = context.lookup().source();
         sourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
@@ -229,34 +232,6 @@ public class FetchPhase implements SearchPhase {
         return searchHit;
     }

-    private Map<String, DocumentField> getSearchFields(SearchContext context,
-                                                       FieldsVisitor fieldsVisitor,
-                                                       int subDocId,
-                                                       Map<String, Set<String>> storedToRequestedFields,
-                                                       LeafReaderContext subReaderContext) {
-        loadStoredFields(context.shardTarget(), subReaderContext, fieldsVisitor, subDocId);
-        fieldsVisitor.postProcess(context.mapperService());
-
-        if (fieldsVisitor.fields().isEmpty()) {
-            return null;
-        }
-
-        Map<String, DocumentField> searchFields = new HashMap<>(fieldsVisitor.fields().size());
-        for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) {
-            String storedField = entry.getKey();
-            List<Object> storedValues = entry.getValue();
-            if (storedToRequestedFields.containsKey(storedField)) {
-                for (String requestedField : storedToRequestedFields.get(storedField)) {
-                    searchFields.put(requestedField, new DocumentField(requestedField, storedValues));
-                }
-            } else {
-                searchFields.put(storedField, new DocumentField(storedField, storedValues));
-            }
-        }
-        return searchFields;
-    }

     private SearchHit createNestedSearchHit(SearchContext context,
                                             int nestedTopDocId,
                                             int nestedSubDocId,
@@ -281,11 +256,17 @@ public class FetchPhase implements SearchPhase {
             source = null;
         }

-        Map<String, DocumentField> searchFields = null;
+        Map<String, DocumentField> docFields = emptyMap();
+        Map<String, DocumentField> metaFields = emptyMap();
         if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) {
             FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false);
-            searchFields = getSearchFields(context, nestedFieldsVisitor, nestedSubDocId,
-                storedToRequestedFields, subReaderContext);
+            loadStoredFields(context.shardTarget(), subReaderContext, nestedFieldsVisitor, nestedSubDocId);
+            nestedFieldsVisitor.postProcess(context.mapperService());
+            if (nestedFieldsVisitor.fields().isEmpty() == false) {
+                docFields = new HashMap<>();
+                metaFields = new HashMap<>();
+                fillDocAndMetaFields(context, nestedFieldsVisitor, storedToRequestedFields, docFields, metaFields);
+            }
         }

         DocumentMapper documentMapper = context.mapperService().documentMapper();
@@ -345,10 +326,7 @@ public class FetchPhase implements SearchPhase {
             XContentType contentType = tuple.v1();
             context.lookup().source().setSourceContentType(contentType);
         }
-        Map<String, DocumentField> metaFields = new HashMap<>();
-        Map<String, DocumentField> documentFields = new HashMap<>();
-        SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields);
-        return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, documentFields, metaFields);
+        return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, docFields, metaFields);
     }

     private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId,
@@ -435,4 +413,28 @@ public class FetchPhase implements SearchPhase {
             throw new FetchPhaseExecutionException(shardTarget, "Failed to fetch doc id [" + docId + "]", e);
         }
     }
+
+    private static void fillDocAndMetaFields(SearchContext context, FieldsVisitor fieldsVisitor,
+            Map<String, Set<String>> storedToRequestedFields,
+            Map<String, DocumentField> docFields, Map<String, DocumentField> metaFields) {
+        for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) {
+            String storedField = entry.getKey();
+            List<Object> storedValues = entry.getValue();
+            if (storedToRequestedFields.containsKey(storedField)) {
+                for (String requestedField : storedToRequestedFields.get(storedField)) {
+                    if (context.mapperService().isMetadataField(requestedField)) {
+                        metaFields.put(requestedField, new DocumentField(requestedField, storedValues));
+                    } else {
+                        docFields.put(requestedField, new DocumentField(requestedField, storedValues));
+                    }
+                }
+            } else {
+                if (context.mapperService().isMetadataField(storedField)) {
+                    metaFields.put(storedField, new DocumentField(storedField, storedValues));
+                } else {
+                    docFields.put(storedField, new DocumentField(storedField, storedValues));
+                }
+            }
+        }
+    }
 }


@@ -43,7 +43,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Objects;
@@ -143,13 +142,12 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
                     binaryValues = data.getBytesValues();
                 }
             }
-            if (hit.fieldsOrNull() == null) {
-                hit.fields(new HashMap<>(2));
-            }
-            DocumentField hitField = hit.getFields().get(field);
+            DocumentField hitField = hit.field(field);
             if (hitField == null) {
                 hitField = new DocumentField(field, new ArrayList<>(2));
-                hit.setField(field, hitField);
+                // even if we request a doc values of a meta-field (e.g. _routing),
+                // docValues fields will still be document fields, and put under "fields" section of a hit.
+                hit.setDocumentField(field, hitField);
             }
             final List<Object> values = hitField.getValues();


@@ -34,7 +34,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.HashMap;
 import java.util.List;

 public final class ScriptFieldsPhase implements FetchSubPhase {
@@ -72,11 +71,8 @@ public final class ScriptFieldsPhase implements FetchSubPhase {
                     }
                     throw e;
                 }
-                if (hit.fieldsOrNull() == null) {
-                    hit.fields(new HashMap<>(2));
-                }
                 String scriptFieldName = scriptFields.get(i).name();
-                DocumentField hitField = hit.getFields().get(scriptFieldName);
+                DocumentField hitField = hit.field(scriptFieldName);
                 if (hitField == null) {
                     final List<Object> values;
                     if (value instanceof Collection) {
@@ -85,7 +81,8 @@ public final class ScriptFieldsPhase implements FetchSubPhase {
                         values = Collections.singletonList(value);
                     }
                     hitField = new DocumentField(scriptFieldName, values);
-                    hit.setField(scriptFieldName, hitField);
+                    // script fields are never meta-fields
+                    hit.setDocumentField(scriptFieldName, hitField);
                 }
             }


@@ -46,6 +46,7 @@ import java.util.Objects;
 import java.util.Set;

 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.search.SearchHit.unknownMetaFieldConsumer;
 import static org.elasticsearch.search.suggest.Suggest.COMPARATOR;

 /**
@@ -362,7 +363,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSuggestion.Entry> {
     }

     private static final ObjectParser<Map<String, Object>, Void> PARSER = new ObjectParser<>("CompletionOptionParser",
-        true, HashMap::new);
+        unknownMetaFieldConsumer, HashMap::new);

     static {
         SearchHit.declareInnerHitsParseFields(PARSER);


@@ -26,11 +26,8 @@ import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.IgnoredFieldMapper;
-import org.elasticsearch.index.mapper.IndexFieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.TypeFieldMapper;
+import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.RandomObjects;
@@ -38,6 +35,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.function.Predicate;
 import java.util.function.Supplier;

 import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
@@ -101,10 +99,14 @@ public class DocumentFieldTests extends ESTestCase {
     }

     public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType) {
-        if (randomBoolean()) {
-            String metaField = randomValueOtherThanMany(field -> field.equals(TypeFieldMapper.NAME)
-                    || field.equals(IndexFieldMapper.NAME) || field.equals(IdFieldMapper.NAME),
-                () -> randomFrom(MapperService.getAllMetaFields()));
+        return randomDocumentField(xContentType, randomBoolean(), fieldName -> false); // don't exclude any meta-fields
+    }
+
+    public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType, boolean isMetafield,
+                                                                          Predicate<String> excludeMetaFieldFilter) {
+        if (isMetafield) {
+            String metaField = randomValueOtherThanMany(excludeMetaFieldFilter,
+                () -> randomFrom(IndicesModule.getBuiltInMetadataFields()));
             DocumentField documentField;
             if (metaField.equals(IgnoredFieldMapper.NAME)) {
                 int numValues = randomIntBetween(1, 3);


@ -29,6 +29,12 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects; import org.elasticsearch.test.RandomObjects;
@ -38,6 +44,7 @@ import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Predicate;
import java.util.function.Supplier; import java.util.function.Supplier;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
@ -195,7 +202,7 @@ public class GetResultTests extends ESTestCase {
RandomObjects.randomSource(random()), getResult.getFields(), null)); RandomObjects.randomSource(random()), getResult.getFields(), null));
mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(),
getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(),
getResult.isExists(), getResult.internalSourceRef(), randomDocumentFields(XContentType.JSON).v1(), null)); getResult.isExists(), getResult.internalSourceRef(), randomDocumentFields(XContentType.JSON, randomBoolean()).v1(), null));
return randomFrom(mutations).get(); return randomFrom(mutations).get();
} }
@ -208,8 +215,8 @@ public class GetResultTests extends ESTestCase {
final long primaryTerm; final long primaryTerm;
final boolean exists; final boolean exists;
BytesReference source = null; BytesReference source = null;
Map<String, DocumentField> fields = null; Map<String, DocumentField> docFields = null;
Map<String, DocumentField> expectedFields = null; Map<String, DocumentField> expectedDocFields = null;
Map<String, DocumentField> metaFields = null; Map<String, DocumentField> metaFields = null;
Map<String, DocumentField> expectedMetaFields = null; Map<String, DocumentField> expectedMetaFields = null;
if (frequently()) { if (frequently()) {
@ -221,14 +228,13 @@ public class GetResultTests extends ESTestCase {
source = RandomObjects.randomSource(random()); source = RandomObjects.randomSource(random());
} }
if (randomBoolean()) { if (randomBoolean()) {
Tuple<Map<String, DocumentField>, Map<String, DocumentField>> tuple = randomDocumentFields(xContentType); Tuple<Map<String, DocumentField>, Map<String, DocumentField>> tuple = randomDocumentFields(xContentType, false);
fields = new HashMap<>(); docFields = tuple.v1();
metaFields = new HashMap<>(); expectedDocFields = tuple.v2();
GetResult.splitFieldsByMetadata(tuple.v1(), fields, metaFields);
expectedFields = new HashMap<>(); tuple = randomDocumentFields(xContentType, true);
expectedMetaFields = new HashMap<>(); metaFields = tuple.v1();
GetResult.splitFieldsByMetadata(tuple.v2(), expectedFields, expectedMetaFields); expectedMetaFields = tuple.v2();
} }
} else { } else {
seqNo = UNASSIGNED_SEQ_NO; seqNo = UNASSIGNED_SEQ_NO;
@@ -236,18 +242,25 @@ public class GetResultTests extends ESTestCase {
 version = -1;
 exists = false;
 }
-GetResult getResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, fields, metaFields);
+GetResult getResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, docFields, metaFields);
 GetResult expectedGetResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source,
-    expectedFields, expectedMetaFields);
+    expectedDocFields, expectedMetaFields);
 return Tuple.tuple(getResult, expectedGetResult);
 }

-public static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> randomDocumentFields(XContentType xContentType) {
-    int numFields = randomIntBetween(2, 10);
+public static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> randomDocumentFields(
+        XContentType xContentType, boolean isMetaFields) {
+    int numFields = isMetaFields ? randomIntBetween(1, 3) : randomIntBetween(2, 10);
 Map<String, DocumentField> fields = new HashMap<>(numFields);
 Map<String, DocumentField> expectedFields = new HashMap<>(numFields);
+// As we are using this to construct a GetResult object that already contains
+// index, type, id, version, seqNo, and source fields, we need to exclude them from random fields
+Predicate<String> excludeMetaFieldFilter = field ->
+    field.equals(TypeFieldMapper.NAME) || field.equals(IndexFieldMapper.NAME) ||
+    field.equals(IdFieldMapper.NAME) || field.equals(VersionFieldMapper.NAME) ||
+    field.equals(SourceFieldMapper.NAME) || field.equals(SeqNoFieldMapper.NAME);
 while (fields.size() < numFields) {
-    Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType);
+    Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType, isMetaFields, excludeMetaFieldFilter);
     DocumentField getField = tuple.v1();
     DocumentField expectedGetField = tuple.v2();
     if (fields.putIfAbsent(getField.getName(), getField) == null) {
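Note: with the new boolean flag, randomDocumentFields generates either regular document fields or meta-fields on its own, so callers no longer need GetResult.splitFieldsByMetadata to separate the two. A minimal usage sketch built from the calls in this hunk; index, type, id, seqNo, primaryTerm, version, and source stand in for locals defined earlier in the test:

    // document fields and their expected parsed form
    Tuple<Map<String, DocumentField>, Map<String, DocumentField>> docTuple =
        randomDocumentFields(XContentType.JSON, false);
    // meta-fields only, generated and validated separately
    Tuple<Map<String, DocumentField>, Map<String, DocumentField>> metaTuple =
        randomDocumentFields(XContentType.JSON, true);
    GetResult result = new GetResult(index, type, id, seqNo, primaryTerm, version, true,
        source, docTuple.v1(), metaTuple.v1());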
View File
@@ -1146,17 +1146,18 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
 public void testDocumentContainsMetadataField() throws Exception {
 DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
-String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
-DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping));
+String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject());
+DocumentMapper mapper = mapperParser.parse("_doc", new CompressedXContent(mapping));

-BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_ttl", 0).endObject());
+BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_field_names", 0).endObject());
 MapperParsingException e = expectThrows(MapperParsingException.class, () ->
-    mapper.parse(new SourceToParse("test", "type", "1", bytes, XContentType.JSON)));
-assertTrue(e.getMessage(), e.getMessage().contains("cannot be added inside a document"));
+    mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)));
+assertTrue(e.getMessage(),
+    e.getMessage().contains("Field [_field_names] is a metadata field and cannot be added inside a document."));

 BytesReference bytes2 = BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
-    .field("foo._ttl", 0).endObject());
-mapper.parse(new SourceToParse("test", "type", "1", bytes2, XContentType.JSON)); // parses without error
+    .field("foo._field_names", 0).endObject());
+mapper.parse(new SourceToParse("test", "_doc", "1", bytes2, XContentType.JSON)); // parses without error
 }

 public void testSimpleMapper() throws Exception {
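The assertions above pin down the general rule: a name matching any registered metadata field is rejected at the root of a document source, while the same name nested under a prefix parses normally. An illustrative sketch, under the assumption that _routing (another built-in metadata field) is handled the same way as _field_names here:

    // expected to throw: metadata field name at the document root
    BytesReference routingDoc = BytesReference.bytes(
        XContentFactory.jsonBuilder().startObject().field("_routing", "r1").endObject());
    expectThrows(MapperParsingException.class,
        () -> mapper.parse(new SourceToParse("test", "_doc", "2", routingDoc, XContentType.JSON)));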
View File
@@ -73,20 +73,12 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
 if (randomBoolean()) {
     nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2));
 }
-Map<String, DocumentField> fields = new HashMap<>();
+Map<String, DocumentField> metaFields = new HashMap<>();
+Map<String, DocumentField> documentFields = new HashMap<>();
 if (frequently()) {
-    fields = new HashMap<>();
     if (randomBoolean()) {
-        fields = GetResultTests.randomDocumentFields(xContentType).v2();
-    }
-}
-HashMap<String, DocumentField> metaFields = new HashMap<>();
-HashMap<String, DocumentField> documentFields = new HashMap<>();
-for (Map.Entry<String, DocumentField> fieldEntry: fields.entrySet()) {
-    if (fieldEntry.getValue().isMetadataField()) {
-        metaFields.put(fieldEntry.getKey(), fieldEntry.getValue());
-    } else {
-        documentFields.put(fieldEntry.getKey(), fieldEntry.getValue());
+        metaFields = GetResultTests.randomDocumentFields(xContentType, true).v2();
+        documentFields = GetResultTests.randomDocumentFields(xContentType, false).v2();
     }
 }
@@ -184,14 +176,15 @@ public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> {
 * objects allow arbitrary keys (the field names that are queries). Also we want to exclude
 * to add anything under "_source" since it is not parsed, and avoid complexity by excluding
 * everything under "inner_hits". They are also keyed by arbitrary names and contain SearchHits,
-* which are already tested elsewhere.
+* which are already tested elsewhere. We also exclude the root level, as all unknown fields
+* on a root level are interpreted as meta-fields and will be kept.
 */
 public void testFromXContentLenientParsing() throws IOException {
 XContentType xContentType = randomFrom(XContentType.values());
 SearchHit searchHit = createTestItem(xContentType, true, true);
 BytesReference originalBytes = toXContent(searchHit, xContentType, true);
 Predicate<String> pathsToExclude = path -> (path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source")
-    || path.contains("inner_hits"));
+    || path.contains("inner_hits") || path.isEmpty());
 BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random());
 SearchHit parsed;
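The root level is excluded here because SearchHit.fromXContent keeps any unknown root-level key as a metadata field, so a random field injected at the root would survive the round-trip and break the equality assertion. A hedged sketch of that behavior (the field name _custom_meta is illustrative, not from this test; createParser is the ESTestCase helper):

    // the unknown root-level key is retained as a metadata DocumentField rather than dropped
    String json = "{\"_index\":\"idx\",\"_id\":\"1\",\"_custom_meta\":\"kept\"}";
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
        SearchHit hit = SearchHit.fromXContent(parser);
    }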
View File
@@ -229,6 +229,9 @@ public class AggregationsTests extends ESTestCase {
 *
 * - we cannot insert into ExtendedMatrixStats "covariance" or "correlation" fields, their syntax is strict
 *
+* - we cannot insert random values in top_hits, as all unknown fields
+*   on a root level of SearchHit are interpreted as meta-fields and will be kept
+*
 * - exclude "key", it can be an array of objects and we need strict values
 */
 Predicate<String> excludes = path -> (path.isEmpty() || path.endsWith("aggregations")
@@ -236,7 +239,8 @@ public class AggregationsTests extends ESTestCase {
 || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName())
 || path.endsWith(CommonFields.VALUES.getPreferredName()) || path.endsWith("covariance") || path.endsWith("correlation")
 || path.contains(CommonFields.VALUE.getPreferredName())
-|| path.endsWith(CommonFields.KEY.getPreferredName()));
+|| path.endsWith(CommonFields.KEY.getPreferredName()))
+|| path.contains("top_hits");
 mutated = insertRandomFields(xContentType, originalBytes, excludes, random());
 } else {
 mutated = originalBytes;
View File
@@ -85,9 +85,11 @@ public class CompletionSuggestionOptionTests extends ESTestCase {
 if (addRandomFields) {
 // "contexts" is an object consisting of key/array pairs, we shouldn't add anything random there
 // also there can be inner search hits fields inside this option, we need to exclude another couple of paths
-// where we cannot add random stuff
+// where we cannot add random stuff. We also exclude the root level, this is done for SearchHits as all unknown fields
+// for SearchHit on a root level are interpreted as meta-fields and will be kept
 Predicate<String> excludeFilter = (path) -> (path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName())
-    || path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits"));
+    || path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits")
+    || path.isEmpty());
 mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
 } else {
 mutated = originalBytes;
View File
@@ -102,9 +102,12 @@ public class SuggestionEntryTests extends ESTestCase {
 // "contexts" is an object consisting of key/array pairs, we shouldn't add anything random there
 // also there can be inner search hits fields inside this option, we need to exclude another couple of paths
 // where we cannot add random stuff
+// exclude "options" which contain SearchHits,
+// on root level of SearchHit fields are interpreted as meta-fields and will be kept
 Predicate<String> excludeFilter = (
     path) -> (path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight")
-    || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits"));
+    || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits")
+    || path.contains("options"));
 mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
 } else {
 mutated = originalBytes;
View File
@@ -122,9 +122,12 @@ public class SuggestionTests extends ESTestCase {
 // - "contexts" is an object consisting of key/array pairs, we shouldn't add anything random there
 // - there can be inner search hits fields inside this option where we cannot add random stuff
 // - the root object should be excluded since it contains the named suggestion arrays
+// We also exclude options that contain SearchHits, as all unknown fields
+// on a root level of SearchHit are interpreted as meta-fields and will be kept.
 Predicate<String> excludeFilter = path -> (path.isEmpty()
     || path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) || path.endsWith("highlight")
-    || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits"));
+    || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits")
+    || path.contains("options"));
 mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
 } else {
 mutated = originalBytes;
View File
@@ -523,9 +523,12 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
 * objects, they are used with "keyed" aggregations and contain
 * named bucket objects. Any new named object on this level should
 * also be a bucket and be parsed as such.
+*
+* we also exclude top_hits that contain SearchHits, as all unknown fields
+* on a root level of SearchHit are interpreted as meta-fields and will be kept.
 */
 Predicate<String> basicExcludes = path -> path.isEmpty() || path.endsWith(Aggregation.CommonFields.META.getPreferredName())
-    || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName());
+    || path.endsWith(Aggregation.CommonFields.BUCKETS.getPreferredName()) || path.contains("top_hits");
 Predicate<String> excludes = basicExcludes.or(excludePathsFromXContentInsertion());
 mutated = insertRandomFields(xContentType, originalBytes, excludes, random());
 } else {
View File
@@ -14,11 +14,11 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.SeqNoFieldMapper;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.license.XPackLicenseState.Feature;
 import org.elasticsearch.script.ScriptService;
@@ -45,7 +45,7 @@ public class SecurityIndexReaderWrapperUnitTests extends ESTestCase {
 private static final Set<String> META_FIELDS;
 static {
-    final Set<String> metaFields = new HashSet<>(Arrays.asList(MapperService.getAllMetaFields()));
+    final Set<String> metaFields = new HashSet<>(IndicesModule.getBuiltInMetadataFields());
     metaFields.add(SourceFieldMapper.NAME);
     metaFields.add(FieldNamesFieldMapper.NAME);
     metaFields.add(SeqNoFieldMapper.NAME);
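A minimal sketch of the replacement, assuming only the imports shown in this hunk: the metadata-field set is now sourced from IndicesModule rather than a hard-coded array on MapperService, and the explicit add() calls keep the fields this test relies on in the set regardless:

    Set<String> metaFields = new HashSet<>(IndicesModule.getBuiltInMetadataFields());
    metaFields.add(SourceFieldMapper.NAME);      // "_source" is handled separately by the wrapper
    boolean isMeta = metaFields.contains("_id"); // true for built-in metadata fields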
View File
@@ -505,12 +505,11 @@ public class ScrollDataExtractorTests extends ESTestCase {
 when(searchResponse.getScrollId()).thenReturn(randomAlphaOfLength(1000));
 List<SearchHit> hits = new ArrayList<>();
 for (int i = 0; i < timestamps.size(); i++) {
-    SearchHit hit = new SearchHit(randomInt());
     Map<String, DocumentField> fields = new HashMap<>();
     fields.put(extractedFields.timeField(), new DocumentField("time", Collections.singletonList(timestamps.get(i))));
     fields.put("field_1", new DocumentField("field_1", Collections.singletonList(field1Values.get(i))));
     fields.put("field_2", new DocumentField("field_2", Collections.singletonList(field2Values.get(i))));
-    hit.fields(fields);
+    SearchHit hit = new SearchHit(randomInt(), null, null, fields, null);
     hits.add(hit);
 }
 SearchHits searchHits = new SearchHits(hits.toArray(new SearchHit[0]),
View File
@@ -23,8 +23,8 @@ public class SearchHitBuilder {
 private final Map<String, DocumentField> fields;

 public SearchHitBuilder(int docId) {
-    hit = new SearchHit(docId);
     fields = new HashMap<>();
+    hit = new SearchHit(docId, null, null, fields, null);
 }

 public SearchHitBuilder addField(String name, Object value) {
@@ -42,9 +42,6 @@ public class SearchHitBuilder {
 }

 public SearchHit build() {
-    if (!fields.isEmpty()) {
-        hit.fields(fields);
-    }
     return hit;
 }
 }
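This is the construction change applied across these test helpers: document fields (and, where needed, metadata fields) are handed to the SearchHit constructor up front, replacing the removed hit.fields(...) setter. A small sketch of the pattern, with null standing in for the hit id and type exactly as in the calls above:

    Map<String, DocumentField> fields = new HashMap<>();
    fields.put("field_1", new DocumentField("field_1", Collections.singletonList("v1")));
    SearchHit hit = new SearchHit(42, null, null, fields, null); // docId, id, type, docFields, metaFields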
View File
@@ -80,10 +80,9 @@ public class ComputingExtractorTests extends AbstractSqlWireSerializingTestCase<
 for (int i = 0; i < times; i++) {
     double value = randomDouble();
     double expected = Math.log(value);
-    SearchHit hit = new SearchHit(1);
     DocumentField field = new DocumentField(fieldName, singletonList(value));
-    hit.fields(singletonMap(fieldName, field));
+    SearchHit hit = new SearchHit(1, null, null, singletonMap(fieldName, field), null);
     assertEquals(expected, extractor.process(hit));
 }
 }
 }
View File
@@ -92,9 +92,8 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<F
 documentFieldValues.add(randomValue());
 }
-SearchHit hit = new SearchHit(1);
 DocumentField field = new DocumentField(fieldName, documentFieldValues);
-hit.fields(singletonMap(fieldName, field));
+SearchHit hit = new SearchHit(1, null, null, singletonMap(fieldName, field), null);
 Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0);
 assertEquals(result, extractor.extract(hit));
 }
@@ -153,9 +152,8 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<F
 if (randomBoolean()) {
     documentFieldValues.add(randomValue());
 }
-SearchHit hit = new SearchHit(1);
 DocumentField field = new DocumentField(fieldName, documentFieldValues);
-hit.fields(singletonMap(fieldName, field));
+SearchHit hit = new SearchHit(1, null, null, singletonMap(fieldName, field), null);
 Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0);
 assertEquals(result, extractor.extract(hit));
 }
@@ -165,9 +163,8 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<F
 ZoneId zoneId = randomZone();
 long millis = 1526467911780L;
 List<Object> documentFieldValues = Collections.singletonList(Long.toString(millis));
-SearchHit hit = new SearchHit(1);
 DocumentField field = new DocumentField("my_date_field", documentFieldValues);
-hit.fields(singletonMap("my_date_field", field));
+SearchHit hit = new SearchHit(1, null, null, singletonMap("my_date_field", field), null);
 FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DATETIME, zoneId, true);
 assertEquals(DateUtils.asDateTime(millis, zoneId), extractor.extract(hit));
 }
@@ -204,9 +201,8 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<F
 public void testMultiValuedDocValue() {
 String fieldName = randomAlphaOfLength(5);
 FieldHitExtractor fe = getFieldHitExtractor(fieldName, true);
-SearchHit hit = new SearchHit(1);
 DocumentField field = new DocumentField(fieldName, asList("a", "b"));
-hit.fields(singletonMap(fieldName, field));
+SearchHit hit = new SearchHit(1, null, null, singletonMap(fieldName, field), null);
 QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit));
 assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported"));
 }
@@ -563,9 +559,8 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<F
 public void testGeoPointExtractionFromDocValues() {
 String fieldName = randomAlphaOfLength(5);
 FieldHitExtractor fe = new FieldHitExtractor(fieldName, GEO_POINT, UTC, true);
-SearchHit hit = new SearchHit(1);
 DocumentField field = new DocumentField(fieldName, singletonList("2, 1"));
-hit.fields(singletonMap(fieldName, field));
+SearchHit hit = new SearchHit(1, null, null, singletonMap(fieldName, field), null);
 assertEquals(new GeoShape(1, 2), fe.extract(hit));
 hit = new SearchHit(1);
 assertNull(fe.extract(hit));
@@ -573,10 +568,9 @@ public class FieldHitExtractorTests extends AbstractSqlWireSerializingTestCase<F
 public void testGeoPointExtractionFromMultipleDocValues() {
 String fieldName = randomAlphaOfLength(5);
-SearchHit hit = new SearchHit(1);
 FieldHitExtractor fe = new FieldHitExtractor(fieldName, GEO_POINT, UTC, true);
-hit.fields(singletonMap(fieldName, new DocumentField(fieldName, Arrays.asList("2,1", "3,4"))));
+SearchHit hit = new SearchHit(1, null, null, singletonMap(fieldName,
+    new DocumentField(fieldName, Arrays.asList("2,1", "3,4"))), null);
 QlIllegalArgumentException ex = expectThrows(QlIllegalArgumentException.class, () -> fe.extract(hit));
 assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported"));