[Type Removal] Remove TypeFieldMapper usage, remove support of `_type` in searches and from LeafFieldsLookup (#3016)

Removes TypeFieldMapper and _type support from searches

Signed-off-by: Suraj Singh <surajrider@gmail.com>
Author: Suraj Singh, 2022-04-20 20:22:11 -07:00, committed by GitHub
Parent: a34d11f15a
Commit: dbdee30a37
20 changed files with 21 additions and 320 deletions
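The recurring migration pattern in this diff is to replace `_type`-based constructs with a regular field or a match-all query. A minimal client-side sketch of that pattern, modeled on the DocumentActionsIT change below (the index name "my-index" and the injected Client instance are illustrative assumptions, not part of this change):

import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.Client;
import org.opensearch.index.query.QueryBuilders;

public final class TypeFreeCountExample {
    // Before 2.0 a "count by type" request often filtered on the _type metadata field:
    //   client.prepareSearch("my-index").setSize(0).setQuery(QueryBuilders.termQuery("_type", "_doc")).get();
    // With a single implicit type per index, a match-all query returns the same total.
    public static long countAllDocs(Client client) {
        SearchResponse response = client.prepareSearch("my-index")
            .setSize(0)
            .setQuery(QueryBuilders.matchAllQuery())
            .get();
        return response.getHits().getTotalHits().value;
    }
}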

View File

@@ -862,7 +862,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
 PercolatorFieldMapper.verifyQuery(rangeQuery1);
 PercolatorFieldMapper.verifyQuery(rangeQuery2);
-HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
+HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("parent", new MatchAllQueryBuilder(), ScoreMode.None);
 expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));
 expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new DisMaxQueryBuilder().add(hasChildQuery)));
 PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1)));
@@ -881,7 +881,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
 expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasChildQuery));
 expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));
-HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
+HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("parent", new MatchAllQueryBuilder(), false);
 expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasParentQuery));
 expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasParentQuery)));
 }

View File

@@ -561,19 +561,6 @@ setup:
   - match: {hits.total: 4}
----
-"Test exists query on _type field":
-  - do:
-      search:
-        rest_total_hits_as_int: true
-        index: test
-        body:
-          query:
-            exists:
-              field: _type
-  - match: {hits.total: 4}
 ---
 "Test exists query on _routing field":
   - do:

View File

@@ -47,7 +47,6 @@ import org.opensearch.common.Strings;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.index.mapper.MapperService;
 import org.opensearch.test.OpenSearchIntegTestCase;
 import org.opensearch.test.hamcrest.OpenSearchAssertions;
@@ -58,7 +57,7 @@ import static org.opensearch.client.Requests.clearIndicesCacheRequest;
 import static org.opensearch.client.Requests.getRequest;
 import static org.opensearch.client.Requests.indexRequest;
 import static org.opensearch.client.Requests.refreshRequest;
-import static org.opensearch.index.query.QueryBuilders.termQuery;
+import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
 import static org.hamcrest.Matchers.equalTo;
@@ -181,11 +180,7 @@ public class DocumentActionsIT extends OpenSearchIntegTestCase {
 // check count
 for (int i = 0; i < 5; i++) {
 // test successful
-SearchResponse countResponse = client().prepareSearch("test")
-    .setSize(0)
-    .setQuery(termQuery("_type", MapperService.SINGLE_MAPPING_NAME))
-    .execute()
-    .actionGet();
+SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet();
 assertNoFailures(countResponse);
 assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
 assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));

View File

@@ -1386,7 +1386,7 @@ public class TopHitsIT extends OpenSearchIntegTestCase {
 SearchResponse response = client().prepareSearch("idx")
 .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)))
 .addAggregation(
-terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_type")))
+terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_index")))
 )
 .get();
 Terms terms = response.getAggregations().get("terms");
@@ -1403,7 +1403,7 @@ public class TopHitsIT extends OpenSearchIntegTestCase {
 .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)))
 .addAggregation(
 terms("terms").field(TERMS_AGGS_FIELD)
-.subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_type")))
+.subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_index")))
 )
 .get();
 Terms terms = response.getAggregations().get("terms");

View File

@@ -471,42 +471,6 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
 assertThat(fields, equalTo(singleton("id")));
 assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i)));
 }
-response = client().prepareSearch()
-    .setQuery(matchAllQuery())
-    .addSort("num1", SortOrder.ASC)
-    .setSize(numDocs)
-    .addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
-    .get();
-assertNoFailures(response);
-assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs));
-for (int i = 0; i < numDocs; i++) {
-    assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
-    Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
-    assertThat(fields, equalTo(singleton("type")));
-    assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME));
-}
-response = client().prepareSearch()
-    .setQuery(matchAllQuery())
-    .addSort("num1", SortOrder.ASC)
-    .setSize(numDocs)
-    .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap()))
-    .addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
-    .get();
-assertNoFailures(response);
-assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs));
-for (int i = 0; i < numDocs; i++) {
-    assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
-    Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
-    assertThat(fields, equalTo(newHashSet("type", "id")));
-    assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME));
-    assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i)));
-}
 }
 public void testScriptFieldUsingSource() throws Exception {

View File

@@ -161,7 +161,6 @@ public class DocumentMapper implements ToXContentFragment {
 final Collection<String> deleteTombstoneMetadataFields = Arrays.asList(
 VersionFieldMapper.NAME,
 IdFieldMapper.NAME,
-TypeFieldMapper.NAME,
 SeqNoFieldMapper.NAME,
 SeqNoFieldMapper.PRIMARY_TERM_NAME,
 SeqNoFieldMapper.TOMBSTONE_NAME

View File

@@ -576,11 +576,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
 * Given the full name of a field, returns its {@link MappedFieldType}.
 */
 public MappedFieldType fieldType(String fullName) {
-    if (fullName.equals(TypeFieldMapper.NAME)) {
-        String type = mapper == null ? null : mapper.type();
-        return new TypeFieldMapper.TypeFieldType(type);
-    }
 return this.mapper == null ? null : this.mapper.fieldTypes().get(fullName);
 }
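With this special case gone (and TypeFieldMapper dropped from the built-in metadata mappers later in this diff), a lookup of "_type" simply falls through to the generic path. A rough single-node test sketch of the expected behavior, assuming an OpenSearchSingleNodeTestCase harness like the one used elsewhere in this change (the index name and keyword field are illustrative assumptions):

// Inside a test method of an OpenSearchSingleNodeTestCase subclass (assumed harness).
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge(
    MapperService.SINGLE_MAPPING_NAME,
    new CompressedXContent("{\"_doc\":{\"properties\":{\"field\":{\"type\":\"keyword\"}}}}"),
    MapperService.MergeReason.MAPPING_UPDATE
);
// _type is no longer resolved to a TypeFieldType; the generic lookup finds nothing.
assertNull(mapperService.fieldType("_type"));
// Regular mapped fields still resolve as before.
assertNotNull(mapperService.fieldType("field"));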

View File

@@ -41,7 +41,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.opensearch.common.geo.ShapeRelation;
-import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.regex.Regex;
 import org.opensearch.common.time.DateMathParser;
 import org.opensearch.index.fielddata.IndexFieldData;
@@ -55,17 +54,9 @@ import java.util.Collections;
 import java.util.Objects;
 import java.util.function.Supplier;
+// Todo: Remove TypeFieldMapper once we have NestedFieldMapper implementation
 public class TypeFieldMapper extends MetadataFieldMapper {
-    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeFieldType.class);
-    public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using the _type field "
-        + "in queries and aggregations is deprecated, prefer to use a field instead.";
-    public static void emitTypesDeprecationWarning() {
-        deprecationLogger.deprecate("query_with_types", TYPES_DEPRECATION_MESSAGE);
-    }
 public static final String NAME = "_type";
 public static final String CONTENT_TYPE = "_type";
@@ -101,7 +92,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
 @Override
 public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
-    emitTypesDeprecationWarning();
 return new ConstantIndexFieldData.Builder(type, name(), CoreValuesSourceType.BYTES);
 }
@@ -112,13 +102,11 @@ public class TypeFieldMapper extends MetadataFieldMapper {
 @Override
 public Query existsQuery(QueryShardContext context) {
-    emitTypesDeprecationWarning();
 return new MatchAllDocsQuery();
 }
 @Override
 protected boolean matches(String pattern, boolean caseInsensitive, QueryShardContext context) {
-    emitTypesDeprecationWarning();
 if (type == null) {
 return false;
 }
@@ -136,7 +124,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
 DateMathParser parser,
 QueryShardContext context
 ) {
-    emitTypesDeprecationWarning();
 BytesRef lower = (BytesRef) lowerTerm;
 BytesRef upper = (BytesRef) upperTerm;
 if (includeLower) {

View File

@@ -64,7 +64,6 @@ import org.opensearch.index.mapper.RoutingFieldMapper;
 import org.opensearch.index.mapper.SeqNoFieldMapper;
 import org.opensearch.index.mapper.SourceFieldMapper;
 import org.opensearch.index.mapper.TextFieldMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.index.mapper.VersionFieldMapper;
 import org.opensearch.index.seqno.RetentionLeaseBackgroundSyncAction;
 import org.opensearch.index.seqno.RetentionLeaseSyncAction;
@@ -185,7 +184,6 @@ public class IndicesModule extends AbstractModule {
 builtInMetadataMappers.put(IndexFieldMapper.NAME, IndexFieldMapper.PARSER);
 builtInMetadataMappers.put(DataStreamFieldMapper.NAME, DataStreamFieldMapper.PARSER);
 builtInMetadataMappers.put(SourceFieldMapper.NAME, SourceFieldMapper.PARSER);
-builtInMetadataMappers.put(TypeFieldMapper.NAME, TypeFieldMapper.PARSER);
 builtInMetadataMappers.put(VersionFieldMapper.NAME, VersionFieldMapper.PARSER);
 builtInMetadataMappers.put(SeqNoFieldMapper.NAME, SeqNoFieldMapper.PARSER);
 // _field_names must be added last so that it has a chance to see all the other mappers

View File

@@ -34,10 +34,8 @@ package org.opensearch.search.lookup;
 import org.apache.lucene.index.LeafReader;
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.index.fieldvisitor.SingleFieldsVisitor;
-import org.opensearch.index.mapper.DocumentMapper;
 import org.opensearch.index.mapper.MappedFieldType;
 import org.opensearch.index.mapper.MapperService;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -147,22 +145,12 @@ public class LeafFieldsLookup implements Map {
 cachedFieldData.put(name, data);
 }
 if (data.fields() == null) {
-    List<Object> values;
-    if (TypeFieldMapper.NAME.equals(data.fieldType().name())) {
-        TypeFieldMapper.emitTypesDeprecationWarning();
-        values = new ArrayList<>(1);
-        final DocumentMapper mapper = mapperService.documentMapper();
-        if (mapper != null) {
-            values.add(mapper.type());
-        }
-    } else {
-        values = new ArrayList<Object>(2);
-        SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values);
-        try {
-            reader.document(docId, visitor);
-        } catch (IOException e) {
-            throw new OpenSearchParseException("failed to load field [{}]", e, name);
-        }
+    List<Object> values = new ArrayList<>(2);
+    SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values);
+    try {
+        reader.document(docId, visitor);
+    } catch (IOException e) {
+        throw new OpenSearchParseException("failed to load field [{}]", e, name);
     }
 data.fields(singletonMap(data.fieldType().name(), values));
 }

View File

@@ -381,7 +381,7 @@ public class InternalEngineTests extends EngineTestCase {
 }
 public void testSegmentsWithIndexSort() throws Exception {
-Sort indexSort = new Sort(new SortedSetSortField("_type", false));
+Sort indexSort = new Sort(new SortedSetSortField("field", false));
 try (
 Store store = createStore();
 Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null, null, null, indexSort, null)

View File

@@ -46,7 +46,6 @@ import org.opensearch.index.mapper.IdFieldMapper;
 import org.opensearch.index.mapper.IndexFieldMapper;
 import org.opensearch.index.mapper.SeqNoFieldMapper;
 import org.opensearch.index.mapper.SourceFieldMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.index.mapper.VersionFieldMapper;
 import org.opensearch.test.OpenSearchTestCase;
 import org.opensearch.test.RandomObjects;
@@ -372,9 +371,8 @@ public class GetResultTests extends OpenSearchTestCase {
 Map<String, DocumentField> fields = new HashMap<>(numFields);
 Map<String, DocumentField> expectedFields = new HashMap<>(numFields);
 // As we are using this to construct a GetResult object that already contains
-// index, type, id, version, seqNo, and source fields, we need to exclude them from random fields
-Predicate<String> excludeMetaFieldFilter = field -> field.equals(TypeFieldMapper.NAME)
-    || field.equals(IndexFieldMapper.NAME)
+// index, id, version, seqNo, and source fields, we need to exclude them from random fields
+Predicate<String> excludeMetaFieldFilter = field -> field.equals(IndexFieldMapper.NAME)
 || field.equals(IdFieldMapper.NAME)
 || field.equals(VersionFieldMapper.NAME)
 || field.equals(SourceFieldMapper.NAME)

View File

@@ -1,106 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.index.mapper;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.IndexService;
import org.opensearch.index.fielddata.IndexFieldDataCache;
import org.opensearch.index.fielddata.IndexOrdinalsFieldData;
import org.opensearch.index.fielddata.LeafOrdinalsFieldData;
import org.opensearch.index.mapper.MapperService.MergeReason;
import org.opensearch.indices.breaker.NoneCircuitBreakerService;
import org.opensearch.plugins.Plugin;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.test.InternalSettingsPlugin;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.function.Function;
public class TypeFieldMapperTests extends OpenSearchSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class);
}
public void testDocValuesSingleType() throws Exception {
testDocValues(this::createIndex);
assertWarnings("[types removal] Using the _type field in queries and aggregations is deprecated, prefer to use a field instead.");
}
public static void testDocValues(Function<String, IndexService> createIndex) throws IOException {
MapperService mapperService = createIndex.apply("test").mapperService();
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
w.addDocument(document.rootDoc());
DirectoryReader r = DirectoryReader.open(w);
w.close();
MappedFieldType ft = mapperService.fieldType(TypeFieldMapper.NAME);
IndexOrdinalsFieldData fd = (IndexOrdinalsFieldData) ft.fielddataBuilder(
"test",
() -> { throw new UnsupportedOperationException(); }
).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService());
LeafOrdinalsFieldData afd = fd.load(r.leaves().get(0));
SortedSetDocValues values = afd.getOrdinalsValues();
assertTrue(values.advanceExact(0));
assertEquals(0, values.nextOrd());
assertEquals(SortedSetDocValues.NO_MORE_ORDS, values.nextOrd());
assertEquals(new BytesRef("type"), values.lookupOrd(0));
r.close();
dir.close();
}
public void testDefaults() throws IOException {
Settings indexSettings = Settings.EMPTY;
MapperService mapperService = createIndex("test", indexSettings).mapperService();
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));
assertEquals(Collections.<IndexableField>emptyList(), Arrays.asList(document.rootDoc().getFields(TypeFieldMapper.NAME)));
}
}

View File

@@ -1,66 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.index.mapper;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.opensearch.index.query.QueryShardContext;
import org.opensearch.test.OpenSearchTestCase;
import org.mockito.Mockito;
import java.util.Arrays;
public class TypeFieldTypeTests extends OpenSearchTestCase {
public void testTermsQuery() {
QueryShardContext context = Mockito.mock(QueryShardContext.class);
TypeFieldMapper.TypeFieldType ft = new TypeFieldMapper.TypeFieldType("_doc");
Query query = ft.termQuery("my_type", context);
assertEquals(new MatchNoDocsQuery(), query);
query = ft.termQuery("_doc", context);
assertEquals(new MatchAllDocsQuery(), query);
query = ft.termsQuery(Arrays.asList("_doc", "type", "foo"), context);
assertEquals(new MatchAllDocsQuery(), query);
query = ft.termsQuery(Arrays.asList("type", "foo"), context);
assertEquals(new MatchNoDocsQuery(), query);
query = ft.termQueryCaseInsensitive("_DOC", context);
assertEquals(new MatchAllDocsQuery(), query);
assertWarnings("[types removal] Using the _type field in queries and aggregations is deprecated, prefer to use a field instead.");
}
}

View File

@@ -41,7 +41,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.opensearch.common.ParsingException;
 import org.opensearch.index.mapper.MappedFieldType;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import java.io.IOException;
@@ -198,12 +197,6 @@ public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBu
 assertSerialization(parsedQuery);
 }
-public void testTypeField() throws IOException {
-    TermQueryBuilder builder = QueryBuilders.termQuery("_type", "value1");
-    builder.doToQuery(createShardContext());
-    assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE);
-}
 public void testRewriteIndexQueryToMatchNone() throws IOException {
 TermQueryBuilder query = QueryBuilders.termQuery("_index", "does_not_exist");
 QueryShardContext queryShardContext = createShardContext();

View File

@@ -50,7 +50,6 @@ import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.index.get.GetResult;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.indices.TermsLookup;
 import org.opensearch.test.AbstractQueryTestCase;
 import org.hamcrest.CoreMatchers;
@@ -351,12 +350,6 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
 assertEquals(Arrays.asList(5, 42d), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list)));
 }
-public void testTypeField() throws IOException {
-    TermsQueryBuilder builder = QueryBuilders.termsQuery("_type", "value1", "value2");
-    builder.doToQuery(createShardContext());
-    assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE);
-}
 public void testRewriteIndexQueryToMatchNone() throws IOException {
 TermsQueryBuilder query = new TermsQueryBuilder("_index", "does_not_exist", "also_does_not_exist");
 QueryShardContext queryShardContext = createShardContext();

View File

@@ -36,7 +36,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.WildcardQuery;
 import org.opensearch.common.ParsingException;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.test.AbstractQueryTestCase;
 import java.io.IOException;
@@ -150,12 +149,6 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQue
 assertEquals("[wildcard] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage());
 }
-public void testTypeField() throws IOException {
-    WildcardQueryBuilder builder = QueryBuilders.wildcardQuery("_type", "doc*");
-    builder.doToQuery(createShardContext());
-    assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE);
-}
 public void testRewriteIndexQueryToMatchNone() throws IOException {
 WildcardQueryBuilder query = new WildcardQueryBuilder("_index", "does_not_exist");
 QueryShardContext queryShardContext = createShardContext();

View File

@@ -45,7 +45,6 @@ import org.opensearch.index.mapper.RoutingFieldMapper;
 import org.opensearch.index.mapper.SeqNoFieldMapper;
 import org.opensearch.index.mapper.SourceFieldMapper;
 import org.opensearch.index.mapper.TextFieldMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.index.mapper.VersionFieldMapper;
 import org.opensearch.indices.mapper.MapperRegistry;
 import org.opensearch.plugins.MapperPlugin;
@@ -95,7 +94,6 @@ public class IndicesModuleTests extends OpenSearchTestCase {
 IndexFieldMapper.NAME,
 DataStreamFieldMapper.NAME,
 SourceFieldMapper.NAME,
-TypeFieldMapper.NAME,
 VersionFieldMapper.NAME,
 SeqNoFieldMapper.NAME,
 FieldNamesFieldMapper.NAME };

View File

@@ -40,7 +40,6 @@ import org.opensearch.common.settings.Settings;
 import org.opensearch.index.IndexService;
 import org.opensearch.index.engine.Engine;
 import org.opensearch.index.fielddata.SortedBinaryDocValues;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.index.query.QueryShardContext;
 import org.opensearch.test.OpenSearchSingleNodeTestCase;
@@ -310,25 +309,6 @@ public class ValuesSourceConfigTests extends OpenSearchSingleNodeTestCase {
 }
 }
-public void testTypeFieldDeprecation() {
-    IndexService indexService = createIndex("index", Settings.EMPTY, "type");
-    try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
-        QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null);
-        ValuesSourceConfig config = ValuesSourceConfig.resolve(
-            context,
-            null,
-            TypeFieldMapper.NAME,
-            null,
-            null,
-            null,
-            null,
-            CoreValuesSourceType.BYTES
-        );
-        assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE);
-    }
-}
 public void testFieldAlias() throws Exception {
 IndexService indexService = createIndex("index", Settings.EMPTY, "type", "field", "type=keyword", "alias", "type=alias,path=field");
 client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();

View File

@@ -117,6 +117,11 @@ public class FieldFetcherTests extends OpenSearchSingleNodeTestCase {
 Map<String, DocumentField> fields = fetchFields(mapperService, source, "_routing");
 assertTrue(fields.isEmpty());
+// The _type field was deprecated in 7.x and is not supported in 2.0. So the behavior
+// should be the same as if the field didn't exist.
+fields = fetchFields(mapperService, source, "_type");
+assertTrue(fields.isEmpty());
 }
 public void testFetchAllFields() throws IOException {