Correct how meta-field is defined for pre 7.8 hits (#57951)
We keep a static list of meta-fields, META_FIELDS_BEFORE_7_8, as it was before. This is done to ensure backwards compatibility with pre-7.8 nodes. Closes #57831
This commit is contained in:
parent
c518670f83
commit
8bd0147ba7
|
@ -44,8 +44,8 @@ setup:
|
|||
"Nested doc version and seqIDs":
|
||||
|
||||
- skip:
|
||||
version: "all" #" - 6.99.99"
|
||||
reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/57831"#"Triggers warnings before 7.0"
|
||||
version: " - 6.99.99"
|
||||
reason: "Triggers warnings before 7.0"
|
||||
|
||||
- do:
|
||||
index:
|
||||
|
|
|
@ -99,7 +99,7 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContent
|
|||
documentFields = new HashMap<>();
|
||||
metaFields = new HashMap<>();
|
||||
fields.forEach((fieldName, docField) ->
|
||||
(MapperService.isMetadataFieldStatic(fieldName) ? metaFields : documentFields).put(fieldName, docField));
|
||||
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
|
||||
}
|
||||
} else {
|
||||
metaFields = Collections.emptyMap();
|
||||
|
|
|
@ -55,7 +55,6 @@ import org.elasticsearch.index.analysis.TokenizerFactory;
|
|||
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.indices.IndicesModule;
|
||||
import org.elasticsearch.indices.InvalidTypeNameException;
|
||||
import org.elasticsearch.indices.mapper.MapperRegistry;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
|
||||
|
@ -63,6 +62,7 @@ import org.elasticsearch.search.suggest.completion.context.ContextMapping;
|
|||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
|
@ -119,6 +119,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
public static final Setting<Boolean> INDEX_MAPPER_DYNAMIC_SETTING =
|
||||
Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT,
|
||||
Property.Dynamic, Property.IndexScope, Property.Deprecated);
|
||||
// Deprecated set of meta-fields, for checking if a field is meta, use an instance method isMetadataField instead
|
||||
@Deprecated
|
||||
public static final Set<String> META_FIELDS_BEFORE_7DOT8 =
|
||||
Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
|
||||
"_id", IgnoredFieldMapper.NAME, "_index", "_routing", "_size", "_timestamp", "_ttl", "_type")));
|
||||
|
||||
private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(MapperService.class));
|
||||
static final String DEFAULT_MAPPING_ERROR_MESSAGE = "[_default_] mappings are not allowed on new indices and should no " +
|
||||
|
@ -804,22 +809,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
indexAnalyzers.close();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Whether a field is a metadata field
|
||||
* Deserialization of SearchHit objects sent from pre 7.8 nodes and GetResults objects sent from pre 7.3 nodes,
|
||||
* uses this method to divide fields into meta and document fields.
|
||||
* TODO: remove in v 9.0
|
||||
* @deprecated Use an instance method isMetadataField instead
|
||||
*/
|
||||
@Deprecated
|
||||
public static boolean isMetadataFieldStatic(String fieldName) {
|
||||
if (IndicesModule.getBuiltInMetadataFields().contains(fieldName)) {
|
||||
return true;
|
||||
}
|
||||
// if a node had Size Plugin installed, _size field should also be considered a meta-field
|
||||
return fieldName.equals("_size");
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Whether a field is a metadata field.
|
||||
* this method considers all mapper plugins
|
||||
|
|
|
@ -168,7 +168,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
|
|||
documentFields = new HashMap<>();
|
||||
metaFields = new HashMap<>();
|
||||
fields.forEach((fieldName, docField) ->
|
||||
(MapperService.isMetadataFieldStatic(fieldName) ? metaFields : documentFields).put(fieldName, docField));
|
||||
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField));
|
||||
}
|
||||
|
||||
int size = in.readVInt();
|
||||
|
|
|
@ -22,11 +22,9 @@ package org.elasticsearch.search;
|
|||
import org.apache.lucene.search.SortField;
|
||||
import org.apache.lucene.search.TotalHits;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.OriginalIndices;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lucene.LuceneTests;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
|
@ -39,10 +37,8 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
|
|||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.test.AbstractSerializingTestCase;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
|
@ -271,37 +267,4 @@ public class SearchHitsTests extends AbstractSerializingTestCase<SearchHits> {
|
|||
|
||||
}
|
||||
}
|
||||
|
||||
public void testReadFromPre6_6_0() throws IOException {
|
||||
try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode("AQC/gAAAAAA="))) {
|
||||
in.setVersion(VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_6_6_0)));
|
||||
SearchHits searchHits = new SearchHits(in);
|
||||
assertEquals(0, searchHits.getHits().length);
|
||||
assertNotNull(searchHits.getTotalHits());
|
||||
assertEquals(0L, searchHits.getTotalHits().value);
|
||||
assertEquals(TotalHits.Relation.EQUAL_TO, searchHits.getTotalHits().relation);
|
||||
assertEquals(-1F, searchHits.getMaxScore(), 0F);
|
||||
assertNull(searchHits.getSortFields());
|
||||
assertNull(searchHits.getCollapseField());
|
||||
assertNull(searchHits.getCollapseValues());
|
||||
}
|
||||
}
|
||||
|
||||
public void testSerializationPre6_6_0() throws IOException {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_6_6_0));
|
||||
SearchHits original = createTestItem(randomFrom(XContentType.values()), false, true, TotalHits.Relation.EQUAL_TO);
|
||||
SearchHits deserialized = copyInstance(original, version);
|
||||
assertArrayEquals(original.getHits(), deserialized.getHits());
|
||||
assertEquals(original.getMaxScore(), deserialized.getMaxScore(), 0F);
|
||||
if (original.getTotalHits() == null) {
|
||||
assertNull(deserialized.getTotalHits());
|
||||
} else {
|
||||
assertNotNull(deserialized.getTotalHits());
|
||||
assertEquals(original.getTotalHits().value, deserialized.getTotalHits().value);
|
||||
assertEquals(original.getTotalHits().relation, deserialized.getTotalHits().relation);
|
||||
}
|
||||
assertNull(deserialized.getSortFields());
|
||||
assertNull(deserialized.getCollapseField());
|
||||
assertNull(deserialized.getCollapseValues());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,9 +20,7 @@
|
|||
package org.elasticsearch.search;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lucene.LuceneTests;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
|
@ -32,11 +30,9 @@ import org.elasticsearch.common.xcontent.XContentType;
|
|||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.AbstractSerializingTestCase;
|
||||
import org.elasticsearch.test.RandomObjects;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
|
||||
public class SearchSortValuesTests extends AbstractSerializingTestCase<SearchSortValues> {
|
||||
|
||||
|
@ -140,22 +136,4 @@ public class SearchSortValuesTests extends AbstractSerializingTestCase<SearchSor
|
|||
values[sortValues.length] = randomSortValue(randomFrom(XContentType.values()), randomBoolean());
|
||||
return new SearchSortValues(values);
|
||||
}
|
||||
|
||||
public void testSerializationPre6_6_0() throws IOException {
|
||||
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_6_6_0));
|
||||
SearchSortValues original = createTestInstance();
|
||||
SearchSortValues deserialized = copyInstance(original, version);
|
||||
assertArrayEquals(original.getFormattedSortValues(), deserialized.getFormattedSortValues());
|
||||
assertEquals(0, deserialized.getRawSortValues().length);
|
||||
}
|
||||
|
||||
public void testReadFromPre6_6_0() throws IOException {
|
||||
try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode("AwIAAAABAQEyBUAIAAAAAAAAAAAAAAAA"))) {
|
||||
in.setVersion(VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_6_6_0)));
|
||||
SearchSortValues deserialized = new SearchSortValues(in);
|
||||
SearchSortValues expected = new SearchSortValues(new Object[]{1, "2", 3d});
|
||||
assertEquals(expected, deserialized);
|
||||
assertEquals(0, deserialized.getRawSortValues().length);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue