[Remove] Multiple Types from IndexTemplateMetadata (#2400)
Removes multi-type support from IndexTemplateMetadata: instead of holding a map from type names to mappings, it now exposes a single mapping for the single allowed type. Also removes the type parameter from the documentMapper() method to avoid accidental NullPointerExceptions during internal mapping retrieval.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
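For orientation, here is a minimal sketch of the API shift described above, based on the test and server hunks below; the `mapperService` and `indexTemplateMetadata` variables are stand-ins for whatever the caller already has:

```java
// Before: a type name had to be passed, and a name that did not match the
// index's single mapping could come back as null, e.g. mapperService.documentMapper("doc").
// After: the mapper for the index's only mapping is returned directly.
DocumentMapper docMapper = mapperService.documentMapper();

// IndexTemplateMetadata now exposes one mapping instead of a type-to-mapping map.
CompressedXContent mapping = indexTemplateMetadata.mappings();
BytesReference mappingSource = mapping.uncompressed();
```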
parent 9cfa395128
commit b00b3ce2b5
@@ -83,7 +83,7 @@ public class GetIndexTemplatesResponseTests extends OpenSearchTestCase {
             .test();
     }
 
-    public void testParsingFromEsResponse() throws IOException {
+    public void testParsingFromOpenSearchResponse() throws IOException {
         for (int runs = 0; runs < 20; runs++) {
             org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse esResponse =
                 new org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse(new ArrayList<>());
@@ -131,8 +131,7 @@ public class GetIndexTemplatesResponseTests extends OpenSearchTestCase {
             assertThat(result.order(), equalTo(esIMD.order()));
             assertThat(result.version(), equalTo(esIMD.version()));
 
-            assertThat(esIMD.mappings().size(), equalTo(1));
-            BytesReference mappingSource = esIMD.mappings().valuesIt().next().uncompressed();
+            BytesReference mappingSource = esIMD.mappings().uncompressed();
             Map<String, Object> expectedMapping = XContentHelper.convertToMap(mappingSource, true, xContentBuilder.contentType())
                 .v2();
             assertThat(result.mappings().sourceAsMap(), equalTo(expectedMapping.get("_doc")));
@@ -224,7 +223,10 @@ public class GetIndexTemplatesResponseTests extends OpenSearchTestCase {
             serverTemplateBuilder.order(clientITMD.order());
             serverTemplateBuilder.version(clientITMD.version());
             if (clientITMD.mappings() != null) {
-                serverTemplateBuilder.putMapping(MapperService.SINGLE_MAPPING_NAME, clientITMD.mappings().source());
+                // The client-side mappings never include a wrapping type, but server-side mappings
+                // for index templates still do so we need to wrap things here
+                String mappings = "{\"" + MapperService.SINGLE_MAPPING_NAME + "\": " + clientITMD.mappings().source().string() + "}";
+                serverTemplateBuilder.putMapping(MapperService.SINGLE_MAPPING_NAME, mappings);
             }
             serverIndexTemplates.add(serverTemplateBuilder.build());
 
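The new comment in this hunk is the crux of the client/server mismatch: client-side template mappings are typeless, while server-side IndexTemplateMetadata still stores a type-wrapped mapping. A rough illustration of the wrapping, with `clientMapping` as a placeholder for the client-side JSON string:

```java
// Typeless client-side mapping:
//   {"properties": {"field": {"type": "text"}}}
String clientMapping = clientITMD.mappings().source().string();

// Wrapped form expected by the server-side template metadata:
//   {"_doc": {"properties": {"field": {"type": "text"}}}}
String wrapped = "{\"" + MapperService.SINGLE_MAPPING_NAME + "\": " + clientMapping + "}";
serverTemplateBuilder.putMapping(MapperService.SINGLE_MAPPING_NAME, wrapped);
```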
@@ -584,7 +584,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> {
                     );
                 }
             }
-            docMapper = mapperService.documentMapper(type);
+            docMapper = mapperService.documentMapper();
             for (BytesReference document : documents) {
                 docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType)));
             }
@ -209,7 +209,7 @@ public class CandidateQueryTests extends OpenSearchSingleNodeTestCase {
|
|||
.endObject()
|
||||
);
|
||||
mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField);
|
||||
fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper().mappers().getMapper(queryField);
|
||||
fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
|
||||
|
||||
queries = new ArrayList<>();
|
||||
|
|
|
@ -161,7 +161,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
String mapper = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("doc")
|
||||
.startObject("properties")
|
||||
.startObject("field")
|
||||
.field("type", "text")
|
||||
|
@ -204,9 +203,8 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
);
|
||||
mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
}
|
||||
|
||||
private void addQueryFieldMappings() throws Exception {
|
||||
|
@ -214,16 +212,18 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
String percolatorMapper = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("doc")
|
||||
.startObject("properties")
|
||||
.startObject(fieldName)
|
||||
.field("type", "percolator")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
);
|
||||
mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
mapperService.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(percolatorMapper),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
);
|
||||
fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName);
|
||||
}
|
||||
|
||||
|
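The percolator test hunks above all follow the same pattern: register the mapping under MapperService.SINGLE_MAPPING_NAME and fetch the mapper without a type. A condensed sketch of that pattern; the mapping JSON here is illustrative, not the exact test fixture:

```java
// Illustrative mapping body; the real test fixtures are longer.
String mapping = "{\"properties\": {\"field\": {\"type\": \"text\"}}}";
mapperService.merge(
    MapperService.SINGLE_MAPPING_NAME,                 // "_doc", the only remaining type
    new CompressedXContent(mapping),
    MapperService.MergeReason.MAPPING_UPDATE
);
DocumentMapper documentMapper = mapperService.documentMapper();   // typeless lookup
```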
@ -235,7 +235,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
TermQuery termQuery2 = new TermQuery(new Term("field", "term2"));
|
||||
bq.add(termQuery2, Occur.SHOULD);
|
||||
|
||||
DocumentMapper documentMapper = mapperService.documentMapper("doc");
|
||||
DocumentMapper documentMapper = mapperService.documentMapper();
|
||||
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
|
||||
IndexMetadata build = IndexMetadata.builder("")
|
||||
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
|
||||
|
@ -295,7 +295,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
Query rangeQuery2 = mapperService.fieldType("number_field1").rangeQuery(15, 20, true, true, null, null, null, context);
|
||||
bq.add(rangeQuery2, Occur.MUST);
|
||||
|
||||
DocumentMapper documentMapper = mapperService.documentMapper("doc");
|
||||
DocumentMapper documentMapper = mapperService.documentMapper();
|
||||
IndexMetadata build = IndexMetadata.builder("")
|
||||
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
|
||||
.numberOfShards(1)
|
||||
|
@ -354,7 +354,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
public void testExtractTermsAndRanges_failed() throws Exception {
|
||||
addQueryFieldMappings();
|
||||
TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true);
|
||||
DocumentMapper documentMapper = mapperService.documentMapper("doc");
|
||||
DocumentMapper documentMapper = mapperService.documentMapper();
|
||||
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
|
||||
IndexMetadata build = IndexMetadata.builder("")
|
||||
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
|
||||
|
@ -380,7 +380,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
public void testExtractTermsAndRanges_partial() throws Exception {
|
||||
addQueryFieldMappings();
|
||||
PhraseQuery phraseQuery = new PhraseQuery("field", "term");
|
||||
DocumentMapper documentMapper = mapperService.documentMapper("doc");
|
||||
DocumentMapper documentMapper = mapperService.documentMapper();
|
||||
PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName);
|
||||
IndexMetadata build = IndexMetadata.builder("")
|
||||
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
|
||||
|
@ -549,11 +549,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
public void testPercolatorFieldMapper() throws Exception {
|
||||
addQueryFieldMappings();
|
||||
QueryBuilder queryBuilder = termQuery("field", "value");
|
||||
ParsedDocument doc = mapperService.documentMapper("doc")
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -570,11 +570,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
// add an query for which we don't extract terms from
|
||||
queryBuilder = rangeQuery("field").from("a").to("z");
|
||||
doc = mapperService.documentMapper("doc")
|
||||
doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -592,7 +592,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -617,11 +617,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
// (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex)
|
||||
|
||||
for (QueryBuilder query : queries) {
|
||||
ParsedDocument doc = mapperService.documentMapper("doc")
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, query).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -636,11 +636,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
addQueryFieldMappings();
|
||||
client().prepareIndex("remote").setId("1").setSource("field", "value").get();
|
||||
QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "1", "field"));
|
||||
ParsedDocument doc = mapperService.documentMapper("doc")
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -661,11 +661,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
public void testPercolatorFieldMapperUnMappedField() throws Exception {
|
||||
addQueryFieldMappings();
|
||||
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> {
|
||||
mapperService.documentMapper("doc")
|
||||
mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder().startObject().field(fieldName, termQuery("unmapped_field", "value")).endObject()
|
||||
|
@ -680,11 +680,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testPercolatorFieldMapper_noQuery() throws Exception {
|
||||
addQueryFieldMappings();
|
||||
ParsedDocument doc = mapperService.documentMapper("doc")
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -693,11 +693,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0));
|
||||
|
||||
try {
|
||||
mapperService.documentMapper("doc")
|
||||
mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -716,7 +716,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
String percolatorMapper = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("doc")
|
||||
.startObject("properties")
|
||||
.startObject(fieldName)
|
||||
.field("type", "percolator")
|
||||
|
@ -724,18 +723,21 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
);
|
||||
MapperParsingException e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(percolatorMapper),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
)
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]"));
|
||||
}
|
||||
|
||||
// multiple percolator fields are allowed in the mapping, but only one field can be used at index time.
|
||||
public void testMultiplePercolatorFields() throws Exception {
|
||||
String typeName = "doc";
|
||||
String typeName = MapperService.SINGLE_MAPPING_NAME;
|
||||
String percolatorMapper = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -754,7 +756,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
|
||||
QueryBuilder queryBuilder = matchQuery("field", "value");
|
||||
ParsedDocument doc = mapperService.documentMapper(typeName)
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
|
@ -776,7 +778,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
// percolator field can be nested under an object field, but only one query can be specified per document
|
||||
public void testNestedPercolatorField() throws Exception {
|
||||
String typeName = "doc";
|
||||
String typeName = MapperService.SINGLE_MAPPING_NAME;
|
||||
String percolatorMapper = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -797,7 +799,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
|
||||
QueryBuilder queryBuilder = matchQuery("field", "value");
|
||||
ParsedDocument doc = mapperService.documentMapper(typeName)
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
|
@ -817,7 +819,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
BytesRef queryBuilderAsBytes = queryBuilderField.binaryValue();
|
||||
assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
|
||||
|
||||
doc = mapperService.documentMapper(typeName)
|
||||
doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
|
@ -840,7 +842,7 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
|
||||
|
||||
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
|
||||
mapperService.documentMapper(typeName)
|
||||
mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
|
@ -948,11 +950,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
query.endObject();
|
||||
query.endObject();
|
||||
|
||||
ParsedDocument doc = mapperService.documentMapper("doc")
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@ -996,11 +998,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
query.endObject();
|
||||
query.endObject();
|
||||
|
||||
doc = mapperService.documentMapper("doc")
|
||||
doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@ -1091,11 +1093,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
QueryBuilder qb = boolQuery().must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2")))
|
||||
.must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3")));
|
||||
ParsedDocument doc = mapperService.documentMapper("doc")
|
||||
ParsedDocument doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -1117,11 +1119,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
.must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3")))
|
||||
.must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4")))
|
||||
.must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5")));
|
||||
doc = mapperService.documentMapper("doc")
|
||||
doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -1146,11 +1148,11 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
.should(boolQuery().should(termQuery("field", "value2")).should(termQuery("field", "value3")))
|
||||
.should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4")))
|
||||
.should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5")));
|
||||
doc = mapperService.documentMapper("doc")
|
||||
doc = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"doc",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
|
||||
XContentType.JSON
|
||||
|
|
|
@ -63,10 +63,10 @@ public class SizeMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testSizeEnabled() throws Exception {
|
||||
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true");
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper("type");
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper();
|
||||
|
||||
BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
|
||||
ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON));
|
||||
ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
|
||||
|
||||
boolean stored = false;
|
||||
boolean points = false;
|
||||
|
@ -80,27 +80,27 @@ public class SizeMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testSizeDisabled() throws Exception {
|
||||
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false");
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper("type");
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper();
|
||||
|
||||
BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
|
||||
ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON));
|
||||
ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
|
||||
|
||||
assertThat(doc.rootDoc().getField("_size"), nullValue());
|
||||
}
|
||||
|
||||
public void testSizeNotSet() throws Exception {
|
||||
IndexService service = createIndex("test", Settings.EMPTY, "type");
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper("type");
|
||||
IndexService service = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME);
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper();
|
||||
|
||||
BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
|
||||
ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON));
|
||||
ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
|
||||
|
||||
assertThat(doc.rootDoc().getField("_size"), nullValue());
|
||||
}
|
||||
|
||||
public void testThatDisablingWorksWhenMerging() throws Exception {
|
||||
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true");
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper("type");
|
||||
DocumentMapper docMapper = service.mapperService().documentMapper();
|
||||
assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true));
|
||||
|
||||
String disabledMapping = Strings.toString(
|
||||
|
|
|
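The size-mapping tests now parse documents under the single mapping name rather than an ad-hoc "type". A trimmed sketch of the pattern used above (index name and field are illustrative):

```java
IndexService service = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME);
DocumentMapper docMapper = service.mapperService().documentMapper();

BytesReference source = BytesReference.bytes(
    XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()
);
ParsedDocument doc = docMapper.parse(
    new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON)
);
```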
@ -664,9 +664,6 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase {
|
|||
// before timing out
|
||||
.put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")
|
||||
.put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster
|
||||
if (randomBoolean()) {
|
||||
settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1");
|
||||
}
|
||||
createIndex(index, settings.build());
|
||||
}
|
||||
ensureGreen(index);
|
||||
|
|
|
@@ -1,5 +1,5 @@
 ---
-"Return empty object if field doesn't exist, but type and index do":
+"Return empty object if field doesn't exist, but index does":
   - do:
       indices.create:
         index: test_index
@ -52,19 +52,18 @@ import org.opensearch.common.xcontent.XContentFactory;
|
|||
import org.opensearch.index.IndexNotFoundException;
|
||||
import org.opensearch.index.IndexService;
|
||||
import org.opensearch.index.mapper.MapperParsingException;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.index.query.RangeQueryBuilder;
|
||||
import org.opensearch.indices.IndicesService;
|
||||
import org.opensearch.test.OpenSearchIntegTestCase;
|
||||
import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
|
||||
import org.opensearch.test.OpenSearchIntegTestCase.Scope;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.BiFunction;
|
||||
|
||||
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS;
|
||||
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
|
||||
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBlocked;
|
||||
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertRequestBuilderThrows;
|
||||
|
@ -109,28 +108,6 @@ public class CreateIndexIT extends OpenSearchIntegTestCase {
|
|||
assertThat(index.getCreationDate(), allOf(lessThanOrEqualTo(timeAfterRequest), greaterThanOrEqualTo(timeBeforeRequest)));
|
||||
}
|
||||
|
||||
public void testDoubleAddMapping() throws Exception {
|
||||
try {
|
||||
prepareCreate("test").addMapping("type1", "date", "type=date").addMapping("type1", "num", "type=integer");
|
||||
fail("did not hit expected exception");
|
||||
} catch (IllegalStateException ise) {
|
||||
// expected
|
||||
}
|
||||
try {
|
||||
prepareCreate("test").addMapping("type1", new HashMap<String, Object>()).addMapping("type1", new HashMap<String, Object>());
|
||||
fail("did not hit expected exception");
|
||||
} catch (IllegalStateException ise) {
|
||||
// expected
|
||||
}
|
||||
try {
|
||||
prepareCreate("test").addMapping("type1", jsonBuilder().startObject().endObject())
|
||||
.addMapping("type1", jsonBuilder().startObject().endObject());
|
||||
fail("did not hit expected exception");
|
||||
} catch (IllegalStateException ise) {
|
||||
// expected
|
||||
}
|
||||
}
|
||||
|
||||
public void testNonNestedMappings() throws Exception {
|
||||
assertAcked(
|
||||
prepareCreate("test").addMapping(
|
||||
|
@ -168,11 +145,16 @@ public class CreateIndexIT extends OpenSearchIntegTestCase {
|
|||
MapperParsingException e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> prepareCreate("test").addMapping(
|
||||
"type1",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject()
|
||||
).get()
|
||||
);
|
||||
assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: Root mapping definition has unsupported parameters"));
|
||||
assertThat(
|
||||
e.getMessage(),
|
||||
startsWith(
|
||||
"Failed to parse mapping [" + MapperService.SINGLE_MAPPING_NAME + "]: Root mapping definition has unsupported parameters"
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public void testEmptyMappings() throws Exception {
|
||||
|
|
|
@ -365,7 +365,7 @@ public class RareClusterStateIT extends OpenSearchIntegTestCase {
|
|||
final IndexService indexService = indicesService.indexServiceSafe(index);
|
||||
assertNotNull(indexService);
|
||||
final MapperService mapperService = indexService.mapperService();
|
||||
DocumentMapper mapper = mapperService.documentMapper(MapperService.SINGLE_MAPPING_NAME);
|
||||
DocumentMapper mapper = mapperService.documentMapper();
|
||||
assertNotNull(mapper);
|
||||
assertNotNull(mapper.mappers().getMapper("field"));
|
||||
});
|
||||
|
@ -389,7 +389,7 @@ public class RareClusterStateIT extends OpenSearchIntegTestCase {
|
|||
final IndexService indexService = indicesService.indexServiceSafe(index);
|
||||
assertNotNull(indexService);
|
||||
final MapperService mapperService = indexService.mapperService();
|
||||
DocumentMapper mapper = mapperService.documentMapper(MapperService.SINGLE_MAPPING_NAME);
|
||||
DocumentMapper mapper = mapperService.documentMapper();
|
||||
assertNotNull(mapper);
|
||||
assertNotNull(mapper.mappers().getMapper("field2"));
|
||||
});
|
||||
|
|
|
@ -47,6 +47,7 @@ import org.opensearch.common.Strings;
|
|||
import org.opensearch.common.xcontent.XContentBuilder;
|
||||
import org.opensearch.common.xcontent.XContentFactory;
|
||||
import org.opensearch.common.xcontent.XContentType;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.test.OpenSearchIntegTestCase;
|
||||
import org.opensearch.test.hamcrest.OpenSearchAssertions;
|
||||
|
||||
|
@ -182,7 +183,7 @@ public class DocumentActionsIT extends OpenSearchIntegTestCase {
|
|||
// test successful
|
||||
SearchResponse countResponse = client().prepareSearch("test")
|
||||
.setSize(0)
|
||||
.setQuery(termQuery("_type", "type1"))
|
||||
.setQuery(termQuery("_type", MapperService.SINGLE_MAPPING_NAME))
|
||||
.execute()
|
||||
.actionGet();
|
||||
assertNoFailures(countResponse);
|
||||
|
|
|
@ -60,6 +60,7 @@ import org.opensearch.common.xcontent.XContentFactory;
|
|||
import org.opensearch.common.xcontent.XContentType;
|
||||
import org.opensearch.env.NodeEnvironment;
|
||||
import org.opensearch.index.mapper.MapperParsingException;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.indices.IndexClosedException;
|
||||
import org.opensearch.indices.ShardLimitValidator;
|
||||
import org.opensearch.test.OpenSearchIntegTestCase;
|
||||
|
@ -108,14 +109,7 @@ public class GatewayIndexStateIT extends OpenSearchIntegTestCase {
|
|||
.prepareCreate("test")
|
||||
.addMapping(
|
||||
"type1",
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject("_routing")
|
||||
.field("required", true)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject()
|
||||
)
|
||||
.execute()
|
||||
.actionGet();
|
||||
|
@ -130,7 +124,7 @@ public class GatewayIndexStateIT extends OpenSearchIntegTestCase {
|
|||
.metadata()
|
||||
.index("test")
|
||||
.getMappings()
|
||||
.get("type1");
|
||||
.get(MapperService.SINGLE_MAPPING_NAME);
|
||||
assertThat(mappingMd.routing().required(), equalTo(true));
|
||||
|
||||
logger.info("--> restarting nodes...");
|
||||
|
@ -149,7 +143,7 @@ public class GatewayIndexStateIT extends OpenSearchIntegTestCase {
|
|||
.metadata()
|
||||
.index("test")
|
||||
.getMappings()
|
||||
.get("type1");
|
||||
.get(MapperService.SINGLE_MAPPING_NAME);
|
||||
assertThat(mappingMd.routing().required(), equalTo(true));
|
||||
}
|
||||
|
||||
|
|
|
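With one mapping per index, the routing requirement is read from the index metadata under MapperService.SINGLE_MAPPING_NAME rather than a named type. A small sketch of that lookup; the cluster-state access chain before .metadata() is assumed to match the surrounding test:

```java
MappingMetadata mappingMd = client().admin()
    .cluster()
    .prepareState()
    .execute()
    .actionGet()
    .getState()
    .metadata()
    .index("test")
    .getMappings()
    .get(MapperService.SINGLE_MAPPING_NAME);   // "_doc" is the only key left
assertThat(mappingMd.routing().required(), equalTo(true));
```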
@ -208,7 +208,6 @@ public class MultiFieldsIntegrationIT extends OpenSearchIntegTestCase {
|
|||
private XContentBuilder createPutMappingSource() throws IOException {
|
||||
return XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("my-type")
|
||||
.startObject("properties")
|
||||
.startObject("title")
|
||||
.field("type", "text")
|
||||
|
@ -220,7 +219,6 @@ public class MultiFieldsIntegrationIT extends OpenSearchIntegTestCase {
|
|||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
}
|
||||
|
||||
|
|
|
@ -74,6 +74,7 @@ import org.opensearch.index.engine.CommitStats;
|
|||
import org.opensearch.index.engine.Engine;
|
||||
import org.opensearch.index.engine.NoOpEngine;
|
||||
import org.opensearch.index.flush.FlushStats;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.index.mapper.SourceToParse;
|
||||
import org.opensearch.index.seqno.RetentionLeaseSyncer;
|
||||
import org.opensearch.index.seqno.SequenceNumbers;
|
||||
|
@ -445,7 +446,7 @@ public class IndexShardIT extends OpenSearchSingleNodeTestCase {
|
|||
.put("index.number_of_shards", 1)
|
||||
.put("index.translog.generation_threshold_size", generationThreshold + "b")
|
||||
.build();
|
||||
createIndex("test", settings, "test");
|
||||
createIndex("test", settings, MapperService.SINGLE_MAPPING_NAME);
|
||||
ensureGreen("test");
|
||||
final IndicesService indicesService = getInstanceFromNode(IndicesService.class);
|
||||
final IndexService test = indicesService.indexService(resolveIndex("test"));
|
||||
|
@ -459,7 +460,7 @@ public class IndexShardIT extends OpenSearchSingleNodeTestCase {
|
|||
final Engine.IndexResult result = shard.applyIndexOperationOnPrimary(
|
||||
Versions.MATCH_ANY,
|
||||
VersionType.INTERNAL,
|
||||
new SourceToParse("test", "test", "1", new BytesArray("{}"), XContentType.JSON),
|
||||
new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray("{}"), XContentType.JSON),
|
||||
SequenceNumbers.UNASSIGNED_SEQ_NO,
|
||||
0,
|
||||
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
|
||||
|
|
|
@ -1,128 +0,0 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*
|
||||
* The OpenSearch Contributors require contributions made to
|
||||
* this file be licensed under the Apache-2.0 license or a
|
||||
* compatible open source license.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
/*
|
||||
* Modifications Copyright OpenSearch Contributors. See
|
||||
* GitHub history for details.
|
||||
*/
|
||||
|
||||
package org.opensearch.indices.exists.types;
|
||||
|
||||
import org.opensearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.opensearch.action.admin.indices.exists.types.TypesExistsResponse;
|
||||
import org.opensearch.client.Client;
|
||||
import org.opensearch.cluster.metadata.IndexMetadata;
|
||||
import org.opensearch.index.IndexNotFoundException;
|
||||
import org.opensearch.plugins.Plugin;
|
||||
import org.opensearch.test.OpenSearchIntegTestCase;
|
||||
import org.opensearch.test.InternalSettingsPlugin;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_READ;
|
||||
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_WRITE;
|
||||
import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY;
|
||||
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
|
||||
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBlocked;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class TypesExistsIT extends OpenSearchIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Collections.singleton(InternalSettingsPlugin.class);
|
||||
}
|
||||
|
||||
public void testSimple() throws Exception {
|
||||
Client client = client();
|
||||
CreateIndexResponse response1 = client.admin()
|
||||
.indices()
|
||||
.prepareCreate("test1")
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject())
|
||||
.execute()
|
||||
.actionGet();
|
||||
CreateIndexResponse response2 = client.admin()
|
||||
.indices()
|
||||
.prepareCreate("test2")
|
||||
.addMapping("type2", jsonBuilder().startObject().startObject("type2").endObject().endObject())
|
||||
.execute()
|
||||
.actionGet();
|
||||
client.admin().indices().prepareAliases().addAlias("test1", "alias1").execute().actionGet();
|
||||
assertAcked(response1);
|
||||
assertAcked(response2);
|
||||
|
||||
TypesExistsResponse response = client.admin().indices().prepareTypesExists("test1").setTypes("type1").execute().actionGet();
|
||||
assertThat(response.isExists(), equalTo(true));
|
||||
response = client.admin().indices().prepareTypesExists("test1").setTypes("type2").execute().actionGet();
|
||||
assertThat(response.isExists(), equalTo(false));
|
||||
try {
|
||||
client.admin().indices().prepareTypesExists("notExist").setTypes("type1").execute().actionGet();
|
||||
fail("Exception should have been thrown");
|
||||
} catch (IndexNotFoundException e) {}
|
||||
try {
|
||||
client.admin().indices().prepareTypesExists("notExist").setTypes("type0").execute().actionGet();
|
||||
fail("Exception should have been thrown");
|
||||
} catch (IndexNotFoundException e) {}
|
||||
response = client.admin().indices().prepareTypesExists("alias1").setTypes("type1").execute().actionGet();
|
||||
assertThat(response.isExists(), equalTo(true));
|
||||
response = client.admin().indices().prepareTypesExists("*").setTypes("type1").execute().actionGet();
|
||||
assertThat(response.isExists(), equalTo(false));
|
||||
response = client.admin().indices().prepareTypesExists("test1", "test2").setTypes("type1").execute().actionGet();
|
||||
assertThat(response.isExists(), equalTo(false));
|
||||
response = client.admin().indices().prepareTypesExists("test1", "test2").setTypes("type2").execute().actionGet();
|
||||
assertThat(response.isExists(), equalTo(false));
|
||||
}
|
||||
|
||||
public void testTypesExistsWithBlocks() throws IOException {
|
||||
assertAcked(prepareCreate("ro").addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject()));
|
||||
ensureGreen("ro");
|
||||
|
||||
// Request is not blocked
|
||||
for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
|
||||
try {
|
||||
enableIndexBlock("ro", block);
|
||||
assertThat(
|
||||
client().admin().indices().prepareTypesExists("ro").setTypes("type1").execute().actionGet().isExists(),
|
||||
equalTo(true)
|
||||
);
|
||||
} finally {
|
||||
disableIndexBlock("ro", block);
|
||||
}
|
||||
}
|
||||
|
||||
// Request is blocked
|
||||
try {
|
||||
enableIndexBlock("ro", IndexMetadata.SETTING_BLOCKS_METADATA);
|
||||
assertBlocked(client().admin().indices().prepareTypesExists("ro").setTypes("type1"));
|
||||
} finally {
|
||||
disableIndexBlock("ro", IndexMetadata.SETTING_BLOCKS_METADATA);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -77,7 +77,7 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
|
|||
assertThat(response.mappings().size(), equalTo(1));
|
||||
assertThat(response.mappings().get("index").size(), equalTo(0));
|
||||
|
||||
assertThat(response.fieldMappings("index", "type", "field"), nullValue());
|
||||
assertThat(response.fieldMappings("index", "field"), nullValue());
|
||||
}
|
||||
|
||||
private XContentBuilder getMappingForType(String type) throws IOException {
|
||||
|
@ -112,48 +112,26 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
|
|||
GetFieldMappingsResponse response = client().admin()
|
||||
.indices()
|
||||
.prepareGetFieldMappings("indexa")
|
||||
.setTypes("typeA")
|
||||
.setFields("field1", "obj.subfield")
|
||||
.get();
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());
|
||||
assertThat(response.fieldMappings("indexa", "field1").fullName(), equalTo("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "field1").sourceAsMap(), hasKey("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
|
||||
// Get mappings by name
|
||||
response = client().admin().indices().prepareGetFieldMappings("indexa").setTypes("typeA").setFields("field1", "obj.subfield").get();
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeB", "field1"), nullValue());
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());
|
||||
response = client().admin().indices().prepareGetFieldMappings("indexa").setFields("field1", "obj.subfield").get();
|
||||
assertThat(response.fieldMappings("indexa", "field1").fullName(), equalTo("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "field1").sourceAsMap(), hasKey("field1"));
|
||||
assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
|
||||
// get mappings by name across multiple indices
|
||||
response = client().admin().indices().prepareGetFieldMappings().setTypes("typeA").setFields("obj.subfield").get();
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield"), nullValue());
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue());
|
||||
|
||||
// get mappings by name across multiple types
|
||||
response = client().admin().indices().prepareGetFieldMappings("indexa").setFields("obj.subfield").get();
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue());
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue());
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());
|
||||
|
||||
// get mappings by name across multiple types & indices
|
||||
response = client().admin().indices().prepareGetFieldMappings().setFields("obj.subfield").get();
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue());
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());
|
||||
assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
assertThat(response.fieldMappings("indexb", "obj.subfield").fullName(), equalTo("obj.subfield"));
|
||||
assertThat(response.fieldMappings("indexb", "obj.subfield").sourceAsMap(), hasKey("subfield"));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
|
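The assertions above drop the type argument throughout: field-mapping lookups are now fieldMappings(index, field). A short sketch, assuming the "indexa" fixture created earlier in this test:

```java
GetFieldMappingsResponse response = client().admin()
    .indices()
    .prepareGetFieldMappings("indexa")
    .setFields("field1", "obj.subfield")
    .get();

// Two-argument lookup, with no type in between:
assertThat(response.fieldMappings("indexa", "field1").fullName(), equalTo("field1"));
assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield"));
```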
@ -169,25 +147,16 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
|
|||
.includeDefaults(true)
|
||||
.get();
|
||||
|
||||
assertThat((Map<String, Object>) response.fieldMappings("test", "num").sourceAsMap().get("num"), hasEntry("index", Boolean.TRUE));
|
||||
assertThat((Map<String, Object>) response.fieldMappings("test", "num").sourceAsMap().get("num"), hasEntry("type", "long"));
|
||||
assertThat(
|
||||
(Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"),
|
||||
(Map<String, Object>) response.fieldMappings("test", "field1").sourceAsMap().get("field1"),
|
||||
hasEntry("index", Boolean.TRUE)
|
||||
);
|
||||
assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", "long"));
|
||||
assertThat((Map<String, Object>) response.fieldMappings("test", "field1").sourceAsMap().get("field1"), hasEntry("type", "text"));
|
||||
assertThat((Map<String, Object>) response.fieldMappings("test", "field2").sourceAsMap().get("field2"), hasEntry("type", "text"));
|
||||
assertThat(
|
||||
(Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"),
|
||||
hasEntry("index", Boolean.TRUE)
|
||||
);
|
||||
assertThat(
|
||||
(Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"),
|
||||
hasEntry("type", "text")
|
||||
);
|
||||
assertThat(
|
||||
(Map<String, Object>) response.fieldMappings("test", "type", "field2").sourceAsMap().get("field2"),
|
||||
hasEntry("type", "text")
|
||||
);
|
||||
assertThat(
|
||||
(Map<String, Object>) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"),
|
||||
(Map<String, Object>) response.fieldMappings("test", "obj.subfield").sourceAsMap().get("subfield"),
|
||||
hasEntry("type", "keyword")
|
||||
);
|
||||
}
|
||||
|
@ -198,12 +167,12 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
|
|||
|
||||
GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("alias", "field1").get();
|
||||
|
||||
FieldMappingMetadata aliasMapping = response.fieldMappings("test", "type", "alias");
|
||||
FieldMappingMetadata aliasMapping = response.fieldMappings("test", "alias");
|
||||
assertThat(aliasMapping.fullName(), equalTo("alias"));
|
||||
assertThat(aliasMapping.sourceAsMap(), hasKey("alias"));
|
||||
assertThat((Map<String, Object>) aliasMapping.sourceAsMap().get("alias"), hasEntry("type", "alias"));
|
||||
|
||||
FieldMappingMetadata field1Mapping = response.fieldMappings("test", "type", "field1");
|
||||
FieldMappingMetadata field1Mapping = response.fieldMappings("test", "field1");
|
||||
assertThat(field1Mapping.fullName(), equalTo("field1"));
|
||||
assertThat(field1Mapping.sourceAsMap(), hasKey("field1"));
|
||||
}
|
||||
|
@ -216,7 +185,6 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
|
|||
GetFieldMappingsResponse response = client().admin()
|
||||
.indices()
|
||||
.prepareGetFieldMappings("index")
|
||||
.setTypes("type")
|
||||
.setFields("field1", "obj.subfield")
|
||||
.get();
|
||||
XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint();
|
||||
|
@ -229,7 +197,7 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
|
|||
|
||||
params.put("pretty", "false");
|
||||
|
||||
response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get();
|
||||
response = client().admin().indices().prepareGetFieldMappings("index").setFields("field1", "obj.subfield").get();
|
||||
responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd();
|
||||
response.toXContent(responseBuilder, new ToXContent.MapParams(params));
|
||||
responseStrings = Strings.toString(responseBuilder);
|
||||
|
@ -249,10 +217,9 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
|
|||
GetFieldMappingsResponse response = client().admin()
|
||||
.indices()
|
||||
.prepareGetFieldMappings("test")
|
||||
.setTypes("_doc")
|
||||
.setFields("field1", "obj.subfield")
|
||||
.get();
|
||||
assertThat(response.fieldMappings("test", "_doc", "field1").fullName(), equalTo("field1"));
|
||||
assertThat(response.fieldMappings("test", "field1").fullName(), equalTo("field1"));
|
||||
} finally {
|
||||
disableIndexBlock("test", block);
|
||||
}
|
||||
|
|
|
@ -205,7 +205,10 @@ public class UpdateMappingIntegrationIT extends OpenSearchIntegTestCase {
|
|||
client().admin()
|
||||
.indices()
|
||||
.preparePutMapping("test")
|
||||
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}", XContentType.JSON)
|
||||
.setSource(
|
||||
"{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}",
|
||||
XContentType.JSON
|
||||
)
|
||||
.execute()
|
||||
.actionGet();
|
||||
fail("Expected MergeMappingException");
|
||||
|
@ -225,7 +228,10 @@ public class UpdateMappingIntegrationIT extends OpenSearchIntegTestCase {
|
|||
client().admin()
|
||||
.indices()
|
||||
.preparePutMapping("test")
|
||||
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}", XContentType.JSON)
|
||||
.setSource(
|
||||
"{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}",
|
||||
XContentType.JSON
|
||||
)
|
||||
.execute()
|
||||
.actionGet();
|
||||
fail("Expected MergeMappingException");
|
||||
|
@ -242,7 +248,11 @@ public class UpdateMappingIntegrationIT extends OpenSearchIntegTestCase {
|
|||
.indices()
|
||||
.prepareCreate("test")
|
||||
.setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0))
|
||||
.addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON)
|
||||
.addMapping(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}",
|
||||
XContentType.JSON
|
||||
)
|
||||
.execute()
|
||||
.actionGet();
|
||||
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
|
||||
|
@ -250,7 +260,7 @@ public class UpdateMappingIntegrationIT extends OpenSearchIntegTestCase {
|
|||
AcknowledgedResponse putMappingResponse = client().admin()
|
||||
.indices()
|
||||
.preparePutMapping("test")
|
||||
.setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON)
|
||||
.setSource("{\"properties\":{\"body\":{\"type\":\"text\"}}}", XContentType.JSON)
|
||||
.execute()
|
||||
.actionGet();
|
||||
|
||||
|
|
|
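The put-mapping sources in this test now either wrap the body under the single mapping name or send a bare {"properties": ...} object; no custom type key survives. A compact sketch of the typeless form used in the last hunk above:

```java
AcknowledgedResponse putMappingResponse = client().admin()
    .indices()
    .preparePutMapping("test")
    .setSource("{\"properties\":{\"body\":{\"type\":\"text\"}}}", XContentType.JSON)
    .execute()
    .actionGet();
```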
@ -486,7 +486,7 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
|
|||
assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
|
||||
Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
|
||||
assertThat(fields, equalTo(singleton("type")));
|
||||
assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1"));
|
||||
assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME));
|
||||
}
|
||||
|
||||
response = client().prepareSearch()
|
||||
|
@ -504,7 +504,7 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
|
|||
assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
|
||||
Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
|
||||
assertThat(fields, equalTo(newHashSet("type", "id")));
|
||||
assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1"));
|
||||
assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME));
|
||||
assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1987,41 +1987,6 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
|
|||
assertHitCount(searchResponse, 1);
|
||||
}
|
||||
|
||||
public void testRangeQueryTypeField_31476() throws Exception {
|
||||
assertAcked(prepareCreate("test").addMapping("foo", "field", "type=keyword"));
|
||||
|
||||
client().prepareIndex("test").setId("1").setSource("field", "value").get();
|
||||
refresh();
|
||||
|
||||
RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra");
|
||||
SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get();
|
||||
assertHitCount(searchResponse, 1);
|
||||
|
||||
range = new RangeQueryBuilder("_type").from("monkey").to("zebra");
|
||||
searchResponse = client().prepareSearch("test").setQuery(range).get();
|
||||
assertHitCount(searchResponse, 0);
|
||||
|
||||
range = new RangeQueryBuilder("_type").from("ape").to("donkey");
|
||||
searchResponse = client().prepareSearch("test").setQuery(range).get();
|
||||
assertHitCount(searchResponse, 0);
|
||||
|
||||
range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(false);
|
||||
searchResponse = client().prepareSearch("test").setQuery(range).get();
|
||||
assertHitCount(searchResponse, 0);
|
||||
|
||||
range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(true);
|
||||
searchResponse = client().prepareSearch("test").setQuery(range).get();
|
||||
assertHitCount(searchResponse, 1);
|
||||
|
||||
range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(false);
|
||||
searchResponse = client().prepareSearch("test").setQuery(range).get();
|
||||
assertHitCount(searchResponse, 0);
|
||||
|
||||
range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(true);
|
||||
searchResponse = client().prepareSearch("test").setQuery(range).get();
|
||||
assertHitCount(searchResponse, 1);
|
||||
}
|
||||
|
||||
public void testNestedQueryWithFieldAlias() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
|
|
@ -41,9 +41,7 @@ import org.opensearch.cluster.metadata.IndexMetadata;
|
|||
import org.opensearch.common.settings.Settings;
|
||||
import org.opensearch.index.Index;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
|
@@ -61,7 +59,7 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequest<CreateIndexClusterStateUpdateRequest> {
 
     private Settings settings = Settings.Builder.EMPTY_SETTINGS;
 
-    private final Map<String, String> mappings = new HashMap<>();
+    private String mappings = "{}";
 
     private final Set<Alias> aliases = new HashSet<>();
 
@@ -80,8 +78,8 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequest<CreateIndexClusterStateUpdateRequest> {
         return this;
     }
 
-    public CreateIndexClusterStateUpdateRequest mappings(Map<String, String> mappings) {
-        this.mappings.putAll(mappings);
+    public CreateIndexClusterStateUpdateRequest mappings(String mappings) {
+        this.mappings = mappings;
         return this;
     }
 
@@ -122,7 +120,7 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequest<CreateIndexClusterStateUpdateRequest> {
         return settings;
     }
 
-    public Map<String, String> mappings() {
+    public String mappings() {
         return mappings;
     }
 
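CreateIndexClusterStateUpdateRequest now carries its mappings as one JSON string instead of a type-keyed map. A hypothetical caller would look roughly like this; the request construction itself is assumed to happen elsewhere, and the _doc wrapper mirrors the CreateIndexRequest Javadoc added later in this commit:

```java
// `updateRequest` is assumed to be a CreateIndexClusterStateUpdateRequest built elsewhere.
updateRequest.mappings("{\"_doc\":{\"properties\":{\"field\":{\"type\":\"keyword\"}}}}");

// The getter now returns the raw JSON string ("{}" when nothing has been set).
String mappings = updateRequest.mappings();
```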
@ -35,6 +35,7 @@ package org.opensearch.action.admin.indices.create;
|
|||
import org.opensearch.LegacyESVersion;
|
||||
import org.opensearch.OpenSearchGenerationException;
|
||||
import org.opensearch.OpenSearchParseException;
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.action.ActionRequestValidationException;
|
||||
import org.opensearch.action.IndicesRequest;
|
||||
import org.opensearch.action.admin.indices.alias.Alias;
|
||||
|
@ -46,7 +47,6 @@ import org.opensearch.common.ParseField;
|
|||
import org.opensearch.common.Strings;
|
||||
import org.opensearch.common.bytes.BytesArray;
|
||||
import org.opensearch.common.bytes.BytesReference;
|
||||
import org.opensearch.common.collect.MapBuilder;
|
||||
import org.opensearch.common.io.stream.StreamInput;
|
||||
import org.opensearch.common.io.stream.StreamOutput;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
|
@ -58,9 +58,10 @@ import org.opensearch.common.xcontent.XContentFactory;
|
|||
import org.opensearch.common.xcontent.XContentHelper;
|
||||
import org.opensearch.common.xcontent.XContentParser;
|
||||
import org.opensearch.common.xcontent.XContentType;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
@ -92,7 +93,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
|
|||
|
||||
private Settings settings = EMPTY_SETTINGS;
|
||||
|
||||
private final Map<String, String> mappings = new HashMap<>();
|
||||
private String mappings = "{}";
|
||||
|
||||
private final Set<Alias> aliases = new HashSet<>();
|
||||
|
||||
|
@ -103,11 +104,21 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
|
|||
cause = in.readString();
|
||||
index = in.readString();
|
||||
settings = readSettingsFromStream(in);
|
||||
int size = in.readVInt();
|
||||
for (int i = 0; i < size; i++) {
|
||||
final String type = in.readString();
|
||||
String source = in.readString();
|
||||
mappings.put(type, source);
|
||||
if (in.getVersion().before(Version.V_2_0_0)) {
|
||||
int size = in.readVInt();
|
||||
if (size == 1) {
|
||||
String type = in.readString();
|
||||
if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) {
|
||||
throw new IllegalArgumentException(
|
||||
"Expected to receive mapping type of [" + MapperService.SINGLE_MAPPING_NAME + "] but got [" + type + "]"
|
||||
);
|
||||
}
|
||||
mappings = in.readString();
|
||||
} else if (size != 0) {
|
||||
throw new IllegalStateException("Expected to read 0 or 1 mappings, but received " + size);
|
||||
}
|
||||
} else {
|
||||
mappings = in.readString();
|
||||
}
|
||||
int aliasesSize = in.readVInt();
|
||||
for (int i = 0; i < aliasesSize; i++) {
|
||||
|
@ -221,6 +232,19 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the mapping for this index
|
||||
*
|
||||
* The mapping should be in the form of a JSON string, with an outer _doc key
|
||||
* <pre>
|
||||
* .mapping("{\"_doc\":{\"properties\": ... }}")
|
||||
* </pre>
|
||||
*/
|
||||
public CreateIndexRequest mapping(String mapping) {
|
||||
this.mappings = mapping;
|
||||
return this;
|
||||
}
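A minimal usage sketch of the new single-string setter, assuming a server-side CreateIndexRequest; the index and field names here are hypothetical and not taken from this change:

CreateIndexRequest request = new CreateIndexRequest("my-index"); // hypothetical index name
request.mapping("{\"_doc\":{\"properties\":{\"title\":{\"type\":\"text\"}}}}"); // typeless mapping wrapped in the _doc key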

/**
* Adds mapping that will be added when the index gets created.
*

@ -249,14 +273,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
return mapping(type, mappingAsMap);
}

/**
* The cause for this index creation.
*/
public CreateIndexRequest cause(String cause) {
this.cause = cause;
return this;
}

/**
* Adds mapping that will be added when the index gets created.
*

@ -278,18 +294,17 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
*/
@Deprecated
public CreateIndexRequest mapping(String type, Map<String, ?> source) {
if (mappings.containsKey(type)) {
throw new IllegalStateException("mappings for type \"" + type + "\" were already defined");
}
// wrap it in a type map if its not
if (source.size() != 1 || !source.containsKey(type)) {
source = MapBuilder.<String, Object>newMapBuilder().put(type, source).map();
source = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, source);
} else if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) {
// if it has a different type name, then unwrap and rewrap with _doc
source = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, source.get(type));
}
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.map(source);
mappings.put(type, Strings.toString(builder));
return this;
return mapping(Strings.toString(builder));
} catch (IOException e) {
throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e);
}

@ -306,6 +321,14 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
return this;
}

/**
* The cause for this index creation.
*/
public CreateIndexRequest cause(String cause) {
this.cause = cause;
return this;
}

/**
* Sets the aliases that will be associated with the index when it gets created
*/

@ -421,7 +444,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
return this;
}

public Map<String, String> mappings() {
public String mappings() {
return this.mappings;
}

@ -467,10 +490,16 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
out.writeString(cause);
out.writeString(index);
writeSettingsToStream(settings, out);
out.writeVInt(mappings.size());
for (Map.Entry<String, String> entry : mappings.entrySet()) {
out.writeString(entry.getKey());
out.writeString(entry.getValue());
if (out.getVersion().before(Version.V_2_0_0)) {
if ("{}".equals(mappings)) {
out.writeVInt(0);
} else {
out.writeVInt(1);
out.writeString(MapperService.SINGLE_MAPPING_NAME);
out.writeString(mappings);
}
} else {
out.writeString(mappings);
}
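The read and write paths above mirror each other, so a pre-2.0 stream still carries at most one type, named _doc. A hedged round-trip sketch under that assumption (the wire version constant and the test scaffolding are illustrative, not part of this change):

CreateIndexRequest request = new CreateIndexRequest("test").mapping("{\"_doc\":{\"properties\":{}}}");
try (BytesStreamOutput out = new BytesStreamOutput()) {
    out.setVersion(LegacyESVersion.V_7_10_0); // assumed pre-2.0 wire version
    request.writeTo(out); // writes a count of 1, the _doc type name, then the mapping string
    try (StreamInput in = out.bytes().streamInput()) {
        in.setVersion(LegacyESVersion.V_7_10_0);
        CreateIndexRequest read = new CreateIndexRequest(in);
        assert "{\"_doc\":{\"properties\":{}}}".equals(read.mappings());
    }
}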
out.writeVInt(aliases.size());
for (Alias alias : aliases) {

@ -32,10 +32,12 @@

package org.opensearch.action.admin.indices.mapping.get;

import org.opensearch.Version;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.OriginalIndices;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.action.support.single.shard.SingleShardRequest;
import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

@ -43,26 +45,26 @@ import java.io.IOException;

public class GetFieldMappingsIndexRequest extends SingleShardRequest<GetFieldMappingsIndexRequest> {

private final boolean probablySingleFieldRequest;
private final boolean includeDefaults;
private final String[] fields;
private final String[] types;

private OriginalIndices originalIndices;
private final OriginalIndices originalIndices;

GetFieldMappingsIndexRequest(StreamInput in) throws IOException {
super(in);
types = in.readStringArray();
if (in.getVersion().before(Version.V_2_0_0)) {
in.readStringArray(); // removed types array
}
fields = in.readStringArray();
includeDefaults = in.readBoolean();
probablySingleFieldRequest = in.readBoolean();
if (in.getVersion().before(Version.V_2_0_0)) {
in.readBoolean(); // removed probablySingleField boolean
}
originalIndices = OriginalIndices.readOriginalIndices(in);
}

GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index, boolean probablySingleFieldRequest) {
this.probablySingleFieldRequest = probablySingleFieldRequest;
GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index) {
this.includeDefaults = other.includeDefaults();
this.types = other.types();
this.fields = other.fields();
assert index != null;
this.index(index);

@ -74,18 +76,10 @@ public class GetFieldMappingsIndexRequest extends SingleShardRequest<GetFieldMap
return null;
}

public String[] types() {
return types;
}

public String[] fields() {
return fields;
}

public boolean probablySingleFieldRequest() {
return probablySingleFieldRequest;
}

public boolean includeDefaults() {
return includeDefaults;
}

@ -103,10 +97,14 @@ public class GetFieldMappingsIndexRequest extends SingleShardRequest<GetFieldMap
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(types);
if (out.getVersion().before(Version.V_2_0_0)) {
out.writeStringArray(Strings.EMPTY_ARRAY);
}
out.writeStringArray(fields);
out.writeBoolean(includeDefaults);
out.writeBoolean(probablySingleFieldRequest);
if (out.getVersion().before(Version.V_2_0_0)) {
out.writeBoolean(false);
}
OriginalIndices.writeOriginalIndices(originalIndices, out);
}

@ -32,6 +32,7 @@

package org.opensearch.action.admin.indices.mapping.get;

import org.opensearch.Version;
import org.opensearch.action.ActionRequest;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.IndicesRequest;

@ -41,6 +42,7 @@ import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

import java.io.IOException;
import java.util.Arrays;

/**
* Request the mappings of specific fields

@ -57,7 +59,6 @@ public class GetFieldMappingsRequest extends ActionRequest implements IndicesReq
private boolean includeDefaults = false;

private String[] indices = Strings.EMPTY_ARRAY;
private String[] types = Strings.EMPTY_ARRAY;

private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen();

@ -66,7 +67,12 @@ public class GetFieldMappingsRequest extends ActionRequest implements IndicesReq
public GetFieldMappingsRequest(StreamInput in) throws IOException {
super(in);
indices = in.readStringArray();
types = in.readStringArray();
if (in.getVersion().before(Version.V_2_0_0)) {
String[] types = in.readStringArray();
if (types != Strings.EMPTY_ARRAY) {
throw new IllegalArgumentException("Expected empty type array but received [" + Arrays.toString(types) + "]");
}
}
indicesOptions = IndicesOptions.readIndicesOptions(in);
local = in.readBoolean();
fields = in.readStringArray();

@ -92,11 +98,6 @@ public class GetFieldMappingsRequest extends ActionRequest implements IndicesReq
return this;
}

public GetFieldMappingsRequest types(String... types) {
this.types = types;
return this;
}

public GetFieldMappingsRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;

@ -107,10 +108,6 @@ public class GetFieldMappingsRequest extends ActionRequest implements IndicesReq
return indices;
}

public String[] types() {
return types;
}

@Override
public IndicesOptions indicesOptions() {
return indicesOptions;

@ -150,7 +147,9 @@ public class GetFieldMappingsRequest extends ActionRequest implements IndicesReq
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArray(indices);
out.writeStringArray(types);
if (out.getVersion().before(Version.V_2_0_0)) {
out.writeStringArray(Strings.EMPTY_ARRAY);
}
indicesOptions.writeIndicesOptions(out);
out.writeBoolean(local);
out.writeStringArray(fields);

@ -54,16 +54,6 @@ public class GetFieldMappingsRequestBuilder extends ActionRequestBuilder<GetFiel
return this;
}

public GetFieldMappingsRequestBuilder setTypes(String... types) {
request.types(types);
return this;
}

public GetFieldMappingsRequestBuilder addTypes(String... types) {
request.types(ArrayUtils.concat(request.types(), types));
return this;
}

public GetFieldMappingsRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
request.indicesOptions(indicesOptions);
return this;

@ -32,9 +32,9 @@

package org.opensearch.action.admin.indices.mapping.get;

import org.opensearch.Version;
import org.opensearch.action.ActionResponse;
import org.opensearch.common.ParseField;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

@ -47,6 +47,7 @@ import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.mapper.Mapper;
import org.opensearch.index.mapper.MapperService;

import java.io.IOException;
import java.io.InputStream;

@ -97,38 +98,37 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
}, MAPPINGS, ObjectParser.ValueType.OBJECT);
}

// todo remove middle `type` level
private final Map<String, Map<String, Map<String, FieldMappingMetadata>>> mappings;
private final Map<String, Map<String, FieldMappingMetadata>> mappings;

GetFieldMappingsResponse(Map<String, Map<String, Map<String, FieldMappingMetadata>>> mappings) {
GetFieldMappingsResponse(Map<String, Map<String, FieldMappingMetadata>> mappings) {
this.mappings = mappings;
}

GetFieldMappingsResponse(StreamInput in) throws IOException {
super(in);
int size = in.readVInt();
Map<String, Map<String, Map<String, FieldMappingMetadata>>> indexMapBuilder = new HashMap<>(size);
Map<String, Map<String, FieldMappingMetadata>> indexMapBuilder = new HashMap<>(size);
for (int i = 0; i < size; i++) {
String index = in.readString();
int typesSize = in.readVInt();
Map<String, Map<String, FieldMappingMetadata>> typeMapBuilder = new HashMap<>(typesSize);
for (int j = 0; j < typesSize; j++) {
String type = in.readString();
int fieldSize = in.readVInt();
Map<String, FieldMappingMetadata> fieldMapBuilder = new HashMap<>(fieldSize);
for (int k = 0; k < fieldSize; k++) {
fieldMapBuilder.put(in.readString(), new FieldMappingMetadata(in.readString(), in.readBytesReference()));
if (in.getVersion().before(Version.V_2_0_0)) {
int typesSize = in.readVInt();
if (typesSize != 1) {
throw new IllegalStateException("Expected single type but received [" + typesSize + "]");
}
typeMapBuilder.put(type, unmodifiableMap(fieldMapBuilder));
in.readString(); // type
}
indexMapBuilder.put(index, unmodifiableMap(typeMapBuilder));
int fieldSize = in.readVInt();
Map<String, FieldMappingMetadata> fieldMapBuilder = new HashMap<>(fieldSize);
for (int k = 0; k < fieldSize; k++) {
fieldMapBuilder.put(in.readString(), new FieldMappingMetadata(in.readString(), in.readBytesReference()));
}
indexMapBuilder.put(index, unmodifiableMap(fieldMapBuilder));
}
mappings = unmodifiableMap(indexMapBuilder);

}

/** returns the retrieved field mapping. The return map keys are index, type, field (as specified in the request). */
public Map<String, Map<String, Map<String, FieldMappingMetadata>>> mappings() {
public Map<String, Map<String, FieldMappingMetadata>> mappings() {
return mappings;
}

@ -138,32 +138,23 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
* @param field field name as specified in the {@link GetFieldMappingsRequest}
* @return FieldMappingMetadata for the requested field or null if not found.
*/
public FieldMappingMetadata fieldMappings(String index, String type, String field) {
Map<String, Map<String, FieldMappingMetadata>> indexMapping = mappings.get(index);
public FieldMappingMetadata fieldMappings(String index, String field) {
Map<String, FieldMappingMetadata> indexMapping = mappings.get(index);
if (indexMapping == null) {
return null;
}
Map<String, FieldMappingMetadata> typeMapping = indexMapping.get(type);
if (typeMapping == null) {
return null;
}
return typeMapping.get(field);
return indexMapping.get(field);
}
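With the type level removed, a lookup needs only the index and the field name. A minimal hedged fragment (the index and field names are hypothetical, and the response would come from a get-field-mappings call):

FieldMappingMetadata metadata = response.fieldMappings("my-index", "title");
if (metadata != null) {
    String fullName = metadata.fullName(); // the field's full path as stored in the mapping
}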

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
for (Map.Entry<String, Map<String, Map<String, FieldMappingMetadata>>> indexEntry : mappings.entrySet()) {
for (Map.Entry<String, Map<String, FieldMappingMetadata>> indexEntry : mappings.entrySet()) {
builder.startObject(indexEntry.getKey());
builder.startObject(MAPPINGS.getPreferredName());

Map<String, FieldMappingMetadata> mappings = null;
for (Map.Entry<String, Map<String, FieldMappingMetadata>> typeEntry : indexEntry.getValue().entrySet()) {
assert mappings == null;
mappings = typeEntry.getValue();
}
if (mappings != null) {
addFieldMappingsToBuilder(builder, params, mappings);
addFieldMappingsToBuilder(builder, params, indexEntry.getValue());
}

builder.endObject();

@ -183,7 +174,6 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
}

public static class FieldMappingMetadata implements ToXContentFragment {
public static final FieldMappingMetadata NULL = new FieldMappingMetadata("", BytesArray.EMPTY);

private static final ParseField FULL_NAME = new ParseField("full_name");
private static final ParseField MAPPING = new ParseField("mapping");

@ -220,10 +210,6 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
return XContentHelper.convertToMap(source, true, XContentType.JSON).v2();
}

public boolean isNull() {
return NULL.fullName().equals(fullName) && NULL.source.length() == source.length();
}

// pkg-private for testing
BytesReference getSource() {
return source;

@ -268,18 +254,18 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(mappings.size());
for (Map.Entry<String, Map<String, Map<String, FieldMappingMetadata>>> indexEntry : mappings.entrySet()) {
for (Map.Entry<String, Map<String, FieldMappingMetadata>> indexEntry : mappings.entrySet()) {
out.writeString(indexEntry.getKey());
if (out.getVersion().before(Version.V_2_0_0)) {
out.writeVInt(1);
out.writeString(MapperService.SINGLE_MAPPING_NAME);
}
out.writeVInt(indexEntry.getValue().size());
for (Map.Entry<String, Map<String, FieldMappingMetadata>> typeEntry : indexEntry.getValue().entrySet()) {
out.writeString(typeEntry.getKey());
out.writeVInt(typeEntry.getValue().size());
for (Map.Entry<String, FieldMappingMetadata> fieldEntry : typeEntry.getValue().entrySet()) {
out.writeString(fieldEntry.getKey());
FieldMappingMetadata fieldMapping = fieldEntry.getValue();
out.writeString(fieldMapping.fullName());
out.writeBytesReference(fieldMapping.source);
}
for (Map.Entry<String, FieldMappingMetadata> fieldEntry : indexEntry.getValue().entrySet()) {
out.writeString(fieldEntry.getKey());
FieldMappingMetadata fieldMapping = fieldEntry.getValue();
out.writeString(fieldMapping.fullName());
out.writeBytesReference(fieldMapping.source);
}
}
}

@ -112,13 +112,13 @@ public class GetMappingsResponse extends ActionResponse implements ToXContentFra
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
for (final ObjectObjectCursor<String, MappingMetadata> indexEntry : getMappings()) {
builder.startObject(indexEntry.key);
if (indexEntry.value != null) {
builder.startObject(indexEntry.key);
builder.field(MAPPINGS.getPreferredName(), indexEntry.value.sourceAsMap());
builder.endObject();
} else {
builder.startObject(MAPPINGS.getPreferredName()).endObject();
}
builder.endObject();
}
return builder;
}

@ -81,9 +81,8 @@ public class TransportGetFieldMappingsAction extends HandledTransportAction<GetF
if (concreteIndices.length == 0) {
listener.onResponse(new GetFieldMappingsResponse(emptyMap()));
} else {
boolean probablySingleFieldRequest = concreteIndices.length == 1 && request.types().length == 1 && request.fields().length == 1;
for (final String index : concreteIndices) {
GetFieldMappingsIndexRequest shardRequest = new GetFieldMappingsIndexRequest(request, index, probablySingleFieldRequest);
GetFieldMappingsIndexRequest shardRequest = new GetFieldMappingsIndexRequest(request, index);
shardAction.execute(shardRequest, new ActionListener<GetFieldMappingsResponse>() {
@Override
public void onResponse(GetFieldMappingsResponse result) {

@ -107,7 +106,7 @@ public class TransportGetFieldMappingsAction extends HandledTransportAction<GetF
}

private GetFieldMappingsResponse merge(AtomicReferenceArray<Object> indexResponses) {
Map<String, Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>>> mergedResponses = new HashMap<>();
Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> mergedResponses = new HashMap<>();
for (int i = 0; i < indexResponses.length(); i++) {
Object element = indexResponses.get(i);
if (element instanceof GetFieldMappingsResponse) {

@ -56,12 +56,10 @@ import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.Mapper;
import org.opensearch.index.shard.ShardId;
import org.opensearch.indices.IndicesService;
import org.opensearch.indices.TypeMissingException;
import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

@ -124,28 +122,9 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
Predicate<String> metadataFieldPredicate = (f) -> indicesService.isMetadataField(indexCreatedVersion, f);
Predicate<String> fieldPredicate = metadataFieldPredicate.or(indicesService.getFieldFilter().apply(shardId.getIndexName()));

DocumentMapper mapper = indexService.mapperService().documentMapper();
Collection<String> typeIntersection;
if (request.types().length == 0) {
typeIntersection = mapper == null ? Collections.emptySet() : Collections.singleton(mapper.type());
} else {
typeIntersection = mapper != null && Regex.simpleMatch(request.types(), mapper.type())
? Collections.singleton(mapper.type())
: Collections.emptySet();
if (typeIntersection.isEmpty()) {
throw new TypeMissingException(shardId.getIndex(), request.types());
}
}

Map<String, Map<String, FieldMappingMetadata>> typeMappings = new HashMap<>();
for (String type : typeIntersection) {
DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
Map<String, FieldMappingMetadata> fieldMapping = findFieldMappingsByType(fieldPredicate, documentMapper, request);
if (!fieldMapping.isEmpty()) {
typeMappings.put(type, fieldMapping);
}
}
return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), Collections.unmodifiableMap(typeMappings)));
DocumentMapper documentMapper = indexService.mapperService().documentMapper();
Map<String, FieldMappingMetadata> fieldMapping = findFieldMappings(fieldPredicate, documentMapper, request);
return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), fieldMapping));
}

@Override

@ -195,11 +174,14 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
}
};

private static Map<String, FieldMappingMetadata> findFieldMappingsByType(
private static Map<String, FieldMappingMetadata> findFieldMappings(
Predicate<String> fieldPredicate,
DocumentMapper documentMapper,
GetFieldMappingsIndexRequest request
) {
if (documentMapper == null) {
return Collections.emptyMap();
}
Map<String, FieldMappingMetadata> fieldMappings = new HashMap<>();
final MappingLookup allFieldMappers = documentMapper.mappers();
for (String field : request.fields()) {

@ -218,8 +200,6 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
Mapper fieldMapper = allFieldMappers.getMapper(field);
if (fieldMapper != null) {
addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults());
} else if (request.probablySingleFieldRequest()) {
fieldMappings.put(field, FieldMappingMetadata.NULL);
}
}
}

@ -390,7 +390,9 @@ public class MetadataRolloverService {
if (Strings.isNullOrEmpty(newIndexName) == false) {
throw new IllegalArgumentException("new index name may not be specified when rolling over a data stream");
}
if ((request.settings().equals(Settings.EMPTY) == false) || (request.aliases().size() > 0) || (request.mappings().size() > 0)) {
if ((request.settings().equals(Settings.EMPTY) == false)
|| (request.aliases().size() > 0)
|| (request.mappings().equals("{}") == false)) {
throw new IllegalArgumentException(
"aliases, mappings, and index settings may not be specified when rolling over a data stream"
);

@ -252,8 +252,8 @@ public class TransportSimulateIndexTemplateAction extends TransportMasterNodeRea
Map<String, AliasMetadata> aliasesByName = aliases.stream().collect(Collectors.toMap(AliasMetadata::getAlias, Function.identity()));

// empty request mapping as the user can't specify any explicit mappings via the simulate api
List<Map<String, Map<String, Object>>> mappings = MetadataCreateIndexService.collectV2Mappings(
Collections.emptyMap(),
List<Map<String, Object>> mappings = MetadataCreateIndexService.collectV2Mappings(
"{}",
simulatedState,
matchingTemplate,
xContentRegistry,

@ -264,11 +264,9 @@ public class TransportSimulateIndexTemplateAction extends TransportMasterNodeRea
indexMetadata,
tempIndexService -> {
MapperService mapperService = tempIndexService.mapperService();
for (Map<String, Map<String, Object>> mapping : mappings) {
if (!mapping.isEmpty()) {
assert mapping.size() == 1 : mapping;
Map.Entry<String, Map<String, Object>> entry = mapping.entrySet().iterator().next();
mapperService.merge(entry.getKey(), entry.getValue(), MapperService.MergeReason.INDEX_TEMPLATE);
for (Map<String, Object> mapping : mappings) {
if (mapping.isEmpty() == false) {
mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MapperService.MergeReason.INDEX_TEMPLATE);
}
}

@ -51,17 +51,17 @@ import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.index.mapper.MapperService;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import static org.opensearch.cluster.metadata.Metadata.CONTEXT_MODE_PARAM;

public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadata> {

private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexTemplateMetadata.class);

@ -161,12 +161,15 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
return this.settings;
}

public ImmutableOpenMap<String, CompressedXContent> mappings() {
return this.mappings;
public CompressedXContent mappings() {
if (this.mappings.isEmpty()) {
return null;
}
return this.mappings.iterator().next().value;
}

public ImmutableOpenMap<String, CompressedXContent> getMappings() {
return this.mappings;
public CompressedXContent getMappings() {
return this.mappings();
}
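Callers that previously iterated the type-keyed map now read the single mapping directly. A small hedged sketch (the template variable is hypothetical):

CompressedXContent mapping = template.mappings(); // null when the template carries no mapping
if (mapping != null) {
    Map<String, Object> mappingAsMap = XContentHelper.convertToMap(mapping.uncompressed(), true).v2();
}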

public ImmutableOpenMap<String, AliasMetadata> aliases() {

@ -194,7 +197,7 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
if (!settings.equals(that.settings)) return false;
if (!patterns.equals(that.patterns)) return false;

return Objects.equals(version, that.version);
return Objects.equals(aliases, that.aliases) && Objects.equals(version, that.version);
}

@Override

@ -205,6 +208,7 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
result = 31 * result + patterns.hashCode();
result = 31 * result + settings.hashCode();
result = 31 * result + mappings.hashCode();
result = 31 * result + aliases.hashCode();
return result;
}

@ -248,6 +252,19 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
out.writeOptionalVInt(version);
}

@Override
public String toString() {
try {
XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
Builder.toXContentWithTypes(this, builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
return Strings.toString(builder);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}

public static class Builder {

private static final Set<String> VALID_FIELDS = Sets.newHashSet(

@ -286,7 +303,7 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
patterns(indexTemplateMetadata.patterns());
settings(indexTemplateMetadata.settings());

mappings = ImmutableOpenMap.builder(indexTemplateMetadata.mappings());
mappings = ImmutableOpenMap.builder(indexTemplateMetadata.mappings);
aliases = ImmutableOpenMap.builder(indexTemplateMetadata.aliases());
}

@ -356,23 +373,6 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
builder.endObject();
}

/**
* Removes the nested type in the xContent representation of {@link IndexTemplateMetadata}.
*
* This method is useful to help bridge the gap between an the internal representation which still uses (the legacy format) a
* nested type in the mapping, and the external representation which does not use a nested type in the mapping.
*/
public static void removeType(IndexTemplateMetadata indexTemplateMetadata, XContentBuilder builder) throws IOException {
builder.startObject();
toInnerXContent(
indexTemplateMetadata,
builder,
new ToXContent.MapParams(Collections.singletonMap("reduce_mappings", "true")),
false
);
builder.endObject();
}

/**
* Serializes the template to xContent, making sure not to nest mappings under the
* type name.

@ -399,10 +399,6 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
ToXContent.Params params,
boolean includeTypeName
) throws IOException {
Metadata.XContentContext context = params.param(CONTEXT_MODE_PARAM) != null
? Metadata.XContentContext.valueOf(params.param(CONTEXT_MODE_PARAM))
: null;

builder.field("order", indexTemplateMetadata.order());
if (indexTemplateMetadata.version() != null) {
builder.field("version", indexTemplateMetadata.version());

@ -413,50 +409,19 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
indexTemplateMetadata.settings().toXContent(builder, params);
builder.endObject();

if (context == Metadata.XContentContext.API) {
builder.startObject("mappings");
for (ObjectObjectCursor<String, CompressedXContent> cursor1 : indexTemplateMetadata.mappings()) {
Map<String, Object> mapping = XContentHelper.convertToMap(cursor1.value.uncompressed(), false).v2();
if (mapping.size() == 1 && mapping.containsKey(cursor1.key)) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(cursor1.key);
}
builder.field(cursor1.key);
builder.map(mapping);
}
builder.endObject();
} else if (params.paramAsBoolean("reduce_mappings", false)) {
// The parameter include_type_name is only ever used in the REST API, where reduce_mappings is
// always set to true. We therefore only check for include_type_name in this branch.
includeTypeName &= (params.paramAsBoolean("reduce_mappings", false) == false);
CompressedXContent m = indexTemplateMetadata.mappings();
if (m != null) {
Map<String, Object> documentMapping = XContentHelper.convertToMap(m.uncompressed(), true).v2();
if (includeTypeName == false) {
Map<String, Object> documentMapping = null;
for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetadata.mappings()) {
assert documentMapping == null;
Map<String, Object> mapping = XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2();
documentMapping = reduceMapping(cursor.key, mapping);
}

if (documentMapping != null) {
builder.field("mappings", documentMapping);
} else {
builder.startObject("mappings").endObject();
}
documentMapping = reduceMapping(documentMapping);
} else {
builder.startObject("mappings");
for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetadata.mappings()) {
Map<String, Object> mapping = XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2();
mapping = reduceMapping(cursor.key, mapping);
builder.field(cursor.key);
builder.map(mapping);
}
builder.endObject();
documentMapping = reduceEmptyMapping(documentMapping);
}
builder.field("mappings");
builder.map(documentMapping);
} else {
builder.startArray("mappings");
for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetadata.mappings()) {
builder.map(XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2());
}
builder.endArray();
builder.startObject("mappings").endObject();
}

builder.startObject("aliases");

@ -467,15 +432,22 @@ public class IndexTemplateMetadata extends AbstractDiffable<IndexTemplateMetadat
}

@SuppressWarnings("unchecked")
private static Map<String, Object> reduceMapping(String type, Map<String, Object> mapping) {
if (mapping.size() == 1 && mapping.containsKey(type)) {
// the type name is the root value, reduce it
return (Map<String, Object>) mapping.get(type);
private static Map<String, Object> reduceEmptyMapping(Map<String, Object> mapping) {
if (mapping.keySet().size() == 1
&& mapping.containsKey(MapperService.SINGLE_MAPPING_NAME)
&& ((Map<String, Object>) mapping.get(MapperService.SINGLE_MAPPING_NAME)).size() == 0) {
return (Map<String, Object>) mapping.values().iterator().next();
} else {
return mapping;
}
}

@SuppressWarnings("unchecked")
private static Map<String, Object> reduceMapping(Map<String, Object> mapping) {
assert mapping.keySet().size() == 1 : mapping.keySet();
return (Map<String, Object>) mapping.values().iterator().next();
}
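As a concrete illustration (values hypothetical): reduceMapping strips the single wrapper key, so {"_doc":{"properties":{"title":{"type":"text"}}}} becomes {"properties":{"title":{"type":"text"}}}, while reduceEmptyMapping only removes the wrapper when the _doc body is empty, so {"_doc":{}} becomes {} and any non-empty mapping is left untouched.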

public static IndexTemplateMetadata fromXContent(XContentParser parser, String templateName) throws IOException {
Builder builder = new Builder(templateName);

@ -32,7 +32,6 @@

package org.opensearch.cluster.metadata;

import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

@ -114,7 +113,6 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static java.util.Collections.singletonMap;
import static java.util.stream.Collectors.toList;
import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING;
import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING;

@ -451,7 +449,7 @@ public class MetadataCreateIndexService {
final boolean silent,
final IndexMetadata sourceMetadata,
final IndexMetadata temporaryIndexMeta,
final List<Map<String, Map<String, Object>>> mappings,
final List<Map<String, Object>> mappings,
final Function<IndexService, List<AliasMetadata>> aliasSupplier,
final List<String> templatesApplied,
final BiConsumer<Metadata.Builder, IndexMetadata> metadataTransformer

@ -541,20 +539,10 @@ public class MetadataCreateIndexService {
templates.stream().map(IndexTemplateMetadata::name).collect(Collectors.toList())
);

final Map<String, Map<String, Object>> mappings = Collections.unmodifiableMap(
final Map<String, Object> mappings = Collections.unmodifiableMap(
parseV1Mappings(
request.mappings(),
templates.stream()
.map(IndexTemplateMetadata::getMappings)
// Converts the ImmutableOpenMap into a non-terrible HashMap
.map(iom -> {
Map<String, CompressedXContent> converted = new HashMap<>(iom.size());
for (ObjectObjectCursor<String, CompressedXContent> cursor : iom) {
converted.put(cursor.key, cursor.value);
}
return converted;
})
.collect(toList()),
templates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()),
xContentRegistry
)
);

@ -616,7 +604,7 @@ public class MetadataCreateIndexService {
);
}

final List<Map<String, Map<String, Object>>> mappings = collectV2Mappings(
final List<Map<String, Object>> mappings = collectV2Mappings(
request.mappings(),
currentState,
templateName,

@ -659,29 +647,31 @@ public class MetadataCreateIndexService {
);
}

public static List<Map<String, Map<String, Object>>> collectV2Mappings(
final Map<String, String> requestMappings,
public static List<Map<String, Object>> collectV2Mappings(
final String requestMappings,
final ClusterState currentState,
final String templateName,
final NamedXContentRegistry xContentRegistry,
final String indexName
) throws Exception {
List<Map<String, Map<String, Object>>> result = new ArrayList<>();

List<CompressedXContent> templateMappings = MetadataIndexTemplateService.collectMappings(currentState, templateName, indexName);
return collectV2Mappings(requestMappings, templateMappings, xContentRegistry);
}

public static List<Map<String, Object>> collectV2Mappings(
final String requestMappings,
final List<CompressedXContent> templateMappings,
final NamedXContentRegistry xContentRegistry
) throws Exception {
List<Map<String, Object>> result = new ArrayList<>();

for (CompressedXContent templateMapping : templateMappings) {
Map<String, Object> parsedTemplateMapping = MapperService.parseMapping(xContentRegistry, templateMapping.string());
result.add(singletonMap(MapperService.SINGLE_MAPPING_NAME, parsedTemplateMapping));
result.add(parsedTemplateMapping);
}

if (requestMappings.size() > 0) {
assert requestMappings.size() == 1 : "expected request metadata mappings to have 1 type but it had: " + requestMappings;
Map.Entry<String, String> entry = requestMappings.entrySet().iterator().next();

String type = entry.getKey();
Map<String, Object> parsedMappings = MapperService.parseMapping(xContentRegistry, entry.getValue());
result.add(singletonMap(type, parsedMappings));
}
Map<String, Object> parsedRequestMappings = MapperService.parseMapping(xContentRegistry, requestMappings);
result.add(parsedRequestMappings);
return result;
}

@ -694,7 +684,8 @@ public class MetadataCreateIndexService {
) throws Exception {
logger.info("applying create index request using existing index [{}] metadata", sourceMetadata.getIndex().getName());

if (request.mappings().size() > 0) {
final Map<String, Object> mappings = MapperService.parseMapping(xContentRegistry, request.mappings());
if (mappings.isEmpty() == false) {
throw new IllegalArgumentException(
"mappings are not allowed when creating an index from a source index, " + "all mappings are copied from the source index"
);

@ -719,7 +710,7 @@ public class MetadataCreateIndexService {
silent,
sourceMetadata,
tmpImd,
Collections.emptyList(),
Collections.singletonList(mappings),
indexService -> resolveAndValidateAliases(
request.index(),
request.aliases(),

@ -745,55 +736,28 @@ public class MetadataCreateIndexService {
* {@link IndexTemplateMetadata#order()}). This merging makes no distinction between field
* definitions, as may result in an invalid field definition
*/
static Map<String, Map<String, Object>> parseV1Mappings(
Map<String, String> requestMappings,
List<Map<String, CompressedXContent>> templateMappings,
static Map<String, Object> parseV1Mappings(
String requestMappings,
List<CompressedXContent> templateMappings,
NamedXContentRegistry xContentRegistry
) throws Exception {
Map<String, Map<String, Object>> mappings = new HashMap<>();
for (Map.Entry<String, String> entry : requestMappings.entrySet()) {
Map<String, Object> mapping = MapperService.parseMapping(xContentRegistry, entry.getValue());
if (mapping.isEmpty()) {
// Someone provided an empty '{}' for mappings, which is okay, but to avoid
// tripping the below assertion, we can safely ignore it
continue;
}
assert mapping.size() == 1 : mapping;
assert entry.getKey().equals(mapping.keySet().iterator().next()) : entry.getKey() + " != " + mapping;
mappings.put(entry.getKey(), mapping);
}

Map<String, Object> mappings = MapperService.parseMapping(xContentRegistry, requestMappings);
// apply templates, merging the mappings into the request mapping if exists
for (Map<String, CompressedXContent> tMapping : templateMappings) {
for (Map.Entry<String, CompressedXContent> cursor : tMapping.entrySet()) {
String mappingString = cursor.getValue().string();
String type = cursor.getKey();
if (mappings.containsKey(type)) {
XContentHelper.mergeDefaults(mappings.get(type), MapperService.parseMapping(xContentRegistry, mappingString));
} else if (mappings.size() == 1 && type.equals(MapperService.SINGLE_MAPPING_NAME)) {
// Typeless template with typed mapping
Map<String, Object> templateMapping = MapperService.parseMapping(xContentRegistry, mappingString);
assert templateMapping.size() == 1 : templateMapping;
assert type.equals(templateMapping.keySet().iterator().next()) : type + " != " + templateMapping;
Map.Entry<String, Map<String, Object>> mappingEntry = mappings.entrySet().iterator().next();
templateMapping = singletonMap(
mappingEntry.getKey(), // reuse type name from the mapping
templateMapping.values().iterator().next()
); // but actual mappings from the template
XContentHelper.mergeDefaults(mappingEntry.getValue(), templateMapping);
} else if (tMapping.size() == 1 && mappings.containsKey(MapperService.SINGLE_MAPPING_NAME)) {
// Typed template with typeless mapping
Map<String, Object> templateMapping = MapperService.parseMapping(xContentRegistry, mappingString);
assert templateMapping.size() == 1 : templateMapping;
assert type.equals(templateMapping.keySet().iterator().next()) : type + " != " + templateMapping;
Map<String, Object> mapping = mappings.get(MapperService.SINGLE_MAPPING_NAME);
templateMapping = singletonMap(
MapperService.SINGLE_MAPPING_NAME, // make template mapping typeless
templateMapping.values().iterator().next()
);
XContentHelper.mergeDefaults(mapping, templateMapping);
for (CompressedXContent mapping : templateMappings) {
if (mapping != null) {
Map<String, Object> templateMapping = MapperService.parseMapping(xContentRegistry, mapping.string());
if (templateMapping.isEmpty()) {
// Someone provided an empty '{}' for mappings, which is okay, but to avoid
// tripping the below assertion, we can safely ignore it
continue;
}
assert templateMapping.size() == 1 : "expected exactly one mapping value, got: " + templateMapping;
// pre-8x templates may have a wrapper type other than _doc, so we re-wrap things here
templateMapping = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, templateMapping.values().iterator().next());
if (mappings.isEmpty()) {
mappings = templateMapping;
} else {
mappings.put(type, MapperService.parseMapping(xContentRegistry, mappingString));
XContentHelper.mergeDefaults(mappings, templateMapping);
}
}
}
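A hedged sketch of how the reworked V1 merge behaves under these assumptions (the mapping strings and the xContentRegistry variable are illustrative, and the call assumes same-package access to the package-private method):

String requestMapping = "{\"_doc\":{\"properties\":{\"title\":{\"type\":\"text\"}}}}";
List<CompressedXContent> templateMappings = Collections.singletonList(
    new CompressedXContent("{\"_doc\":{\"properties\":{\"created\":{\"type\":\"date\"}}}}"));
Map<String, Object> merged = MetadataCreateIndexService.parseV1Mappings(requestMapping, templateMappings, xContentRegistry);
// merged should contain a single "_doc" entry whose properties include both "title" and "created",
// with the request mapping taking precedence over the template-provided defaults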

@ -1170,15 +1134,13 @@ public class MetadataCreateIndexService {
private static void updateIndexMappingsAndBuildSortOrder(
IndexService indexService,
CreateIndexClusterStateUpdateRequest request,
List<Map<String, Map<String, Object>>> mappings,
List<Map<String, Object>> mappings,
@Nullable IndexMetadata sourceMetadata
) throws IOException {
MapperService mapperService = indexService.mapperService();
for (Map<String, Map<String, Object>> mapping : mappings) {
if (!mapping.isEmpty()) {
assert mapping.size() == 1 : mapping;
Map.Entry<String, Map<String, Object>> entry = mapping.entrySet().iterator().next();
mapperService.merge(entry.getKey(), entry.getValue(), MergeReason.INDEX_TEMPLATE);
for (Map<String, Object> mapping : mappings) {
if (mapping.isEmpty() == false) {
mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MergeReason.INDEX_TEMPLATE);
}
}

@ -932,6 +932,11 @@ public class MetadataIndexTemplateService {
templateBuilder.putAlias(aliasMetadata);
}
IndexTemplateMetadata template = templateBuilder.build();
IndexTemplateMetadata existingTemplate = currentState.metadata().templates().get(request.name);
if (template.equals(existingTemplate)) {
// The template is unchanged, therefore there is no need for a cluster state update
return currentState;
}

Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(template);

@ -81,7 +81,7 @@ public class DocumentMapper implements ToXContentFragment {
this.rootObjectMapper = builder.build(builderContext);

final String type = rootObjectMapper.name();
final DocumentMapper existingMapper = mapperService.documentMapper(type);
final DocumentMapper existingMapper = mapperService.documentMapper();
final Version indexCreatedVersion = mapperService.getIndexSettings().getIndexVersionCreated();
final Map<String, TypeParser> metadataMapperParsers = mapperService.mapperRegistry.getMetadataMapperParsers(
indexCreatedVersion

@ -306,13 +306,13 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
// refresh mapping can happen when the parsing/merging of the mapping from the metadata doesn't result in the same
// mapping, in this case, we send to the master to refresh its own version of the mappings (to conform with the
// merge version of it, which it does when refreshing the mappings), and warn log it.
if (documentMapper(mappingType).mappingSource().equals(incomingMappingSource) == false) {
if (documentMapper().mappingSource().equals(incomingMappingSource) == false) {
logger.debug(
"[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}",
index(),
mappingType,
incomingMappingSource,
documentMapper(mappingType).mappingSource()
documentMapper().mappingSource()
);

requireRefresh = true;

@ -530,16 +530,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return mapper;
}

/**
* Return the {@link DocumentMapper} for the given type.
*/
public DocumentMapper documentMapper(String type) {
if (mapper != null && type.equals(mapper.type())) {
return mapper;
}
return null;
}

/**
* Returns {@code true} if the given {@code mappingSource} includes a type
* as a top-level object.

@ -574,12 +564,12 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
* Returns the document mapper created, including a mapping update if the
* type has been dynamically created.
*/
public DocumentMapperForType documentMapperWithAutoCreate(String type) {
DocumentMapper mapper = documentMapper(type);
public DocumentMapperForType documentMapperWithAutoCreate() {
DocumentMapper mapper = documentMapper();
if (mapper != null) {
return new DocumentMapperForType(mapper, null);
}
mapper = parse(type, null);
mapper = parse(SINGLE_MAPPING_NAME, null);
return new DocumentMapperForType(mapper, mapper.mapping());
}

@ -312,11 +312,11 @@ public class QueryShardContext extends QueryRewriteContext {
}

/**
* Returns s {@link DocumentMapper} instance for the given type.
* Delegates to {@link MapperService#documentMapper(String)}
* Returns s {@link DocumentMapper} instance.
* Delegates to {@link MapperService#documentMapper()}
*/
public DocumentMapper documentMapper(String type) {
return mapperService.documentMapper(type);
return mapperService.documentMapper();
}

/**

@ -137,7 +137,7 @@ public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {
protected Query doToQuery(QueryShardContext context) throws IOException {
deprecationLogger.deprecate("type_query", TYPES_DEPRECATION_MESSAGE);
// LUCENE 4 UPGRADE document mapper should use bytesref as well?
DocumentMapper documentMapper = context.getMapperService().documentMapper(type);
DocumentMapper documentMapper = context.getMapperService().documentMapper();
if (documentMapper == null) {
// no type means no documents
return new MatchNoDocsQuery();

@ -882,7 +882,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
);
}
operation = prepareIndex(
docMapper(resolvedType),
docMapper(),
sourceWithResolvedType,
seqNo,
opPrimaryTerm,

@ -1102,7 +1102,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
// fail if index and delete operations do not use the same type.
// TODO: clean this up when types are gone
try {
Mapping update = docMapper(type).getMapping();
Mapping update = docMapper().getMapping();
if (update != null) {
return new Engine.DeleteResult(update);
}

@ -1249,7 +1249,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
return getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint());
}

public IndexingStats indexingStats(String... types) {
public IndexingStats indexingStats() {
Engine engine = getEngineOrNull();
final boolean throttled;
final long throttleTimeInMillis;

@ -3143,8 +3143,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
}
}

private DocumentMapperForType docMapper(String type) {
return mapperService.documentMapperWithAutoCreate(mapperService.resolveDocumentType(type));
private DocumentMapperForType docMapper() {
return mapperService.documentMapperWithAutoCreate();
}

private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) {

@ -3874,7 +3874,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
return new EngineConfig.TombstoneDocSupplier() {
@Override
public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
return docMapper(type).getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id);
return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id);
}

@Override

@ -389,7 +389,7 @@ public class TermVectorsService {
String routing
) {
MapperService mapperService = indexShard.mapperService();
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(MapperService.SINGLE_MAPPING_NAME);
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate();
ParsedDocument parsedDocument = docMapper.getDocumentMapper()
.parse(new SourceToParse(index, MapperService.SINGLE_MAPPING_NAME, "_id_for_tv_api", doc, xContentType, routing));
if (docMapper.getMapping() != null) {

@ -94,12 +94,7 @@ public class RestGetFieldMappingAction extends BaseRestHandler {
.getFieldMappings(getMappingsRequest, new RestBuilderListener<GetFieldMappingsResponse>(channel) {
@Override
public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBuilder builder) throws Exception {
Map<String, Map<String, Map<String, FieldMappingMetadata>>> mappingsByIndex = response.mappings();

boolean isPossibleSingleFieldRequest = indices.length == 1 && fields.length == 1;
if (isPossibleSingleFieldRequest && isFieldMappingMissingField(mappingsByIndex)) {
return new BytesRestResponse(OK, builder.startObject().endObject());
}
Map<String, Map<String, FieldMappingMetadata>> mappingsByIndex = response.mappings();

RestStatus status = OK;
if (mappingsByIndex.isEmpty() && fields.length > 0) {

@ -111,24 +106,4 @@ public class RestGetFieldMappingAction extends BaseRestHandler {
});
}

/**
* Helper method to find out if the only included fieldmapping metadata is typed NULL, which means
* that type and index exist, but the field did not
*/
private boolean isFieldMappingMissingField(Map<String, Map<String, Map<String, FieldMappingMetadata>>> mappingsByIndex) {
if (mappingsByIndex.size() != 1) {
return false;
}

for (Map<String, Map<String, FieldMappingMetadata>> value : mappingsByIndex.values()) {
for (Map<String, FieldMappingMetadata> fieldValue : value.values()) {
for (Map.Entry<String, FieldMappingMetadata> fieldMappingMetadataEntry : fieldValue.entrySet()) {
if (fieldMappingMetadataEntry.getValue().isNull()) {
return true;
}
}
}
}
return false;
}
}

@ -66,7 +66,7 @@ public class CreateIndexRequestTests extends OpenSearchTestCase {
try (StreamInput in = output.bytes().streamInput()) {
CreateIndexRequest serialized = new CreateIndexRequest(in);
assertEquals(request.index(), serialized.index());
assertEquals(mapping, serialized.mappings().get("my_type"));
assertEquals("{\"_doc\":{}}", serialized.mappings());
}
}
}

@ -33,6 +33,7 @@

package org.opensearch.action.admin.indices.mapping.get;

import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata;
import org.opensearch.common.Strings;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;

@ -47,22 +48,29 @@ import java.util.Map;
public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCase<GetFieldMappingsResponse> {

public void testManualSerialization() throws IOException {
Map<String, Map<String, Map<String, FieldMappingMetadata>>> mappings = new HashMap<>();
Map<String, Map<String, FieldMappingMetadata>> mappings = new HashMap<>();
FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{}"));
mappings.put("index", Collections.singletonMap("type", Collections.singletonMap("field", fieldMappingMetadata)));
mappings.put("index", Collections.singletonMap("field", fieldMappingMetadata));
GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings);

try (BytesStreamOutput out = new BytesStreamOutput()) {
response.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes)) {
GetFieldMappingsResponse serialized = new GetFieldMappingsResponse(in);
FieldMappingMetadata metadata = serialized.fieldMappings("index", "type", "field");
FieldMappingMetadata metadata = serialized.fieldMappings("index", "field");
assertNotNull(metadata);
assertEquals(new BytesArray("{}"), metadata.getSource());
}
}
}

public void testNullFieldMappingToXContent() {
Map<String, Map<String, FieldMappingMetadata>> mappings = new HashMap<>();
mappings.put("index", Collections.emptyMap());
GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings);
assertEquals("{\"index\":{\"mappings\":{}}}", Strings.toString(response));
}
|
||||
|
||||
@Override
|
||||
protected GetFieldMappingsResponse createTestInstance() {
|
||||
return new GetFieldMappingsResponse(randomMapping());
|
||||
|
@ -73,24 +81,18 @@ public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCa
|
|||
return GetFieldMappingsResponse::new;
|
||||
}
|
||||
|
||||
private Map<String, Map<String, Map<String, FieldMappingMetadata>>> randomMapping() {
|
||||
Map<String, Map<String, Map<String, FieldMappingMetadata>>> mappings = new HashMap<>();
|
||||
private Map<String, Map<String, FieldMappingMetadata>> randomMapping() {
|
||||
Map<String, Map<String, FieldMappingMetadata>> mappings = new HashMap<>();
|
||||
|
||||
int indices = randomInt(10);
|
||||
for (int i = 0; i < indices; i++) {
|
||||
final Map<String, Map<String, FieldMappingMetadata>> doctypesMappings = new HashMap<>();
|
||||
int doctypes = randomInt(10);
|
||||
for (int j = 0; j < doctypes; j++) {
|
||||
Map<String, FieldMappingMetadata> fieldMappings = new HashMap<>();
|
||||
int fields = randomInt(10);
|
||||
for (int k = 0; k < fields; k++) {
|
||||
final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}";
|
||||
FieldMappingMetadata metadata = new FieldMappingMetadata("my field", new BytesArray(mapping));
|
||||
fieldMappings.put("field" + k, metadata);
|
||||
}
|
||||
doctypesMappings.put("doctype" + j, fieldMappings);
|
||||
Map<String, FieldMappingMetadata> fieldMappings = new HashMap<>();
|
||||
int fields = randomInt(10);
|
||||
for (int k = 0; k < fields; k++) {
|
||||
final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}";
|
||||
FieldMappingMetadata metaData = new FieldMappingMetadata("my field", new BytesArray(mapping));
|
||||
fieldMappings.put("field" + k, metaData);
|
||||
}
|
||||
mappings.put("index" + i, doctypesMappings);
|
||||
}
|
||||
return mappings;
|
||||
}
|
||||
|
|
|
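With the per-type level gone from GetFieldMappingsResponse, callers go straight from index name to field name. A hedged sketch of reading one field's mapping source; the class and method names here are illustrative only:

import java.util.Collections;
import java.util.Map;
import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata;

final class FieldMappingLookupSketch {
    // Returns the raw mapping JSON for one field, or null if the field has no mapping.
    static String mappingSourceFor(GetFieldMappingsResponse response, String index, String field) {
        // two levels now: index -> field -> metadata (response.fieldMappings(index, field) is the equivalent shortcut)
        Map<String, Map<String, FieldMappingMetadata>> byIndex = response.mappings();
        FieldMappingMetadata metadata = byIndex.getOrDefault(index, Collections.emptyMap()).get(field);
        return metadata == null || metadata.isNull() ? null : metadata.getSource().utf8ToString();
    }
}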
@ -62,6 +62,7 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class RolloverRequestTests extends OpenSearchTestCase {
|
||||
|
@ -122,7 +123,7 @@ public class RolloverRequestTests extends OpenSearchTestCase {
|
|||
request.fromXContent(createParser(builder));
|
||||
Map<String, Condition<?>> conditions = request.getConditions();
|
||||
assertThat(conditions.size(), equalTo(2));
|
||||
assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1));
|
||||
assertThat(request.getCreateIndexRequest().mappings(), containsString("not_analyzed"));
|
||||
assertThat(request.getCreateIndexRequest().aliases().size(), equalTo(1));
|
||||
assertThat(request.getCreateIndexRequest().settings().getAsInt("number_of_shards", 0), equalTo(10));
|
||||
}
|
||||
|
@ -143,7 +144,7 @@ public class RolloverRequestTests extends OpenSearchTestCase {
|
|||
request.fromXContent(createParser(builder));
|
||||
|
||||
CreateIndexRequest createIndexRequest = request.getCreateIndexRequest();
|
||||
String mapping = createIndexRequest.mappings().get(MapperService.SINGLE_MAPPING_NAME);
|
||||
String mapping = createIndexRequest.mappings();
|
||||
assertNotNull(mapping);
|
||||
|
||||
Map<String, Object> parsedMapping = XContentHelper.convertToMap(new BytesArray(mapping), false, XContentType.JSON).v2();
|
||||
|
|
|
@ -221,9 +221,7 @@ public class ClusterStateTests extends OpenSearchTestCase {
|
|||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : {\n"
|
||||
+ " \"type\" : {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " },\n"
|
||||
+ " \"aliases\" : { }\n"
|
||||
+ " }\n"
|
||||
|
@ -424,9 +422,7 @@ public class ClusterStateTests extends OpenSearchTestCase {
|
|||
+ "\"\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : {\n"
|
||||
+ " \"type\" : {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " },\n"
|
||||
+ " \"aliases\" : { }\n"
|
||||
+ " }\n"
|
||||
|
@ -627,11 +623,7 @@ public class ClusterStateTests extends OpenSearchTestCase {
|
|||
+ " }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : {\n"
|
||||
+ " \"type\" : {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : { },\n"
|
||||
+ " \"aliases\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
|
|
|
@ -49,14 +49,12 @@ import java.io.IOException;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
||||
import static java.util.Collections.singletonMap;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
|
||||
public class IndexTemplateMetadataTests extends OpenSearchTestCase {
|
||||
|
||||
public void testIndexTemplateMetadataXContentRoundTrip() throws Exception {
|
||||
ToXContent.Params params = new ToXContent.MapParams(singletonMap("reduce_mappings", "true"));
|
||||
|
||||
String template = "{\"index_patterns\" : [ \".test-*\" ],\"order\" : 1000,"
|
||||
+ "\"settings\" : {\"number_of_shards\" : 1,\"number_of_replicas\" : 0},"
|
||||
|
@ -84,7 +82,7 @@ public class IndexTemplateMetadataTests extends OpenSearchTestCase {
|
|||
final BytesReference templateBytesRoundTrip;
|
||||
try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) {
|
||||
builder.startObject();
|
||||
IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, params);
|
||||
IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
templateBytesRoundTrip = BytesReference.bytes(builder);
|
||||
}
|
||||
|
|
|
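The template tests above now build and round-trip a single, typeless mapping. A sketch of what that looks like for code constructing a legacy template; the class name, template name, and field are made up for illustration, and the throws clause is there because Builder.putMapping compresses the source:

import java.io.IOException;
import java.util.Collections;
import org.opensearch.cluster.metadata.IndexTemplateMetadata;
import org.opensearch.index.mapper.MapperService;

final class SingleMappingTemplateSketch {
    static String templateMappingJson() throws IOException {
        IndexTemplateMetadata template = IndexTemplateMetadata.builder("logs-template")
            .patterns(Collections.singletonList("logs-*"))
            // one typeless mapping per template; server-side template mappings are still wrapped in "_doc"
            .putMapping(
                MapperService.SINGLE_MAPPING_NAME,
                "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"message\":{\"type\":\"text\"}}}}"
            )
            .build();
        // mappings() now returns the single CompressedXContent instead of a map keyed by type
        return template.mappings().string();
    }
}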
@ -109,7 +109,6 @@ import static java.util.Collections.emptyList;
|
|||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.singleton;
|
||||
import static java.util.Collections.singletonList;
|
||||
import static java.util.Collections.singletonMap;
|
||||
import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING;
|
||||
import static org.hamcrest.Matchers.endsWith;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
@ -711,20 +710,18 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
|
|||
templateBuilder.putAlias(AliasMetadata.builder("alias1"));
|
||||
templateBuilder.putMapping("type", createMapping("mapping_from_template", "text"));
|
||||
});
|
||||
request.mappings(singletonMap("type", createMapping("mapping_from_request", "text").string()));
|
||||
request.mappings(createMapping("mapping_from_request", "text").string());
|
||||
|
||||
Map<String, Map<String, Object>> parsedMappings = MetadataCreateIndexService.parseV1Mappings(
|
||||
Map<String, Object> parsedMappings = MetadataCreateIndexService.parseV1Mappings(
|
||||
request.mappings(),
|
||||
Collections.singletonList(convertMappings(templateMetadata.getMappings())),
|
||||
Collections.singletonList(templateMetadata.getMappings()),
|
||||
NamedXContentRegistry.EMPTY
|
||||
);
|
||||
|
||||
assertThat(parsedMappings, hasKey("type"));
|
||||
Map<String, Object> mappingType = parsedMappings.get("type");
|
||||
assertThat(mappingType, hasKey("type"));
|
||||
Map<String, Object> type = (Map<String, Object>) mappingType.get("type");
|
||||
assertThat(type, hasKey("properties"));
|
||||
Map<String, Object> mappingsProperties = (Map<String, Object>) type.get("properties");
|
||||
assertThat(parsedMappings, hasKey(MapperService.SINGLE_MAPPING_NAME));
|
||||
Map<String, Object> doc = (Map<String, Object>) parsedMappings.get(MapperService.SINGLE_MAPPING_NAME);
|
||||
assertThat(doc, hasKey("properties"));
|
||||
Map<String, Object> mappingsProperties = (Map<String, Object>) doc.get("properties");
|
||||
assertThat(mappingsProperties, hasKey("mapping_from_request"));
|
||||
assertThat(mappingsProperties, hasKey("mapping_from_template"));
|
||||
}
|
||||
|
@ -781,17 +778,17 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
|
|||
|
||||
IndexTemplateMetadata templateMetadata = addMatchingTemplate(
|
||||
builder -> builder.putAlias(AliasMetadata.builder("alias").searchRouting("fromTemplate").build())
|
||||
.putMapping("type", templateMapping)
|
||||
.putMapping("_doc", templateMapping)
|
||||
.settings(Settings.builder().put("key1", "templateValue"))
|
||||
);
|
||||
|
||||
request.mappings(singletonMap("type", reqMapping.string()));
|
||||
request.aliases(singleton(new Alias("alias").searchRouting("fromRequest")));
|
||||
request.mappings(reqMapping.string());
|
||||
request.aliases(Collections.singleton(new Alias("alias").searchRouting("fromRequest")));
|
||||
request.settings(Settings.builder().put("key1", "requestValue").build());
|
||||
|
||||
Map<String, Map<String, Object>> parsedMappings = MetadataCreateIndexService.parseV1Mappings(
|
||||
Map<String, Object> parsedMappings = MetadataCreateIndexService.parseV1Mappings(
|
||||
request.mappings(),
|
||||
Collections.singletonList(convertMappings(templateMetadata.mappings())),
|
||||
Collections.singletonList(templateMetadata.mappings()),
|
||||
xContentRegistry()
|
||||
);
|
||||
List<AliasMetadata> resolvedAliases = resolveAndValidateAliases(
|
||||
|
@ -816,12 +813,10 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
|
|||
|
||||
assertThat(resolvedAliases.get(0).getSearchRouting(), equalTo("fromRequest"));
|
||||
assertThat(aggregatedIndexSettings.get("key1"), equalTo("requestValue"));
|
||||
assertThat(parsedMappings, hasKey("type"));
|
||||
Map<String, Object> mappingType = parsedMappings.get("type");
|
||||
assertThat(mappingType, hasKey("type"));
|
||||
Map<String, Object> type = (Map<String, Object>) mappingType.get("type");
|
||||
assertThat(type, hasKey("properties"));
|
||||
Map<String, Object> mappingsProperties = (Map<String, Object>) type.get("properties");
|
||||
assertThat(parsedMappings, hasKey("_doc"));
|
||||
Map<String, Object> doc = (Map<String, Object>) parsedMappings.get("_doc");
|
||||
assertThat(doc, hasKey("properties"));
|
||||
Map<String, Object> mappingsProperties = (Map<String, Object>) doc.get("properties");
|
||||
assertThat(mappingsProperties, hasKey("test"));
|
||||
assertThat((Map<String, Object>) mappingsProperties.get("test"), hasValue("keyword"));
|
||||
}
|
||||
|
@ -1046,9 +1041,9 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
|
|||
}
|
||||
});
|
||||
|
||||
Map<String, Map<String, Object>> mappings = parseV1Mappings(
|
||||
singletonMap(MapperService.SINGLE_MAPPING_NAME, "{\"_doc\":{}}"),
|
||||
Collections.singletonList(convertMappings(templateMetadata.mappings())),
|
||||
Map<String, Object> mappings = parseV1Mappings(
|
||||
"{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{}}",
|
||||
Collections.singletonList(templateMetadata.mappings()),
|
||||
xContentRegistry()
|
||||
);
|
||||
assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME));
|
||||
|
@ -1062,12 +1057,8 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
|
|||
ExceptionsHelper.reThrowIfNotNull(e);
|
||||
}
|
||||
});
|
||||
Map<String, Map<String, Object>> mappings = parseV1Mappings(
|
||||
emptyMap(),
|
||||
Collections.singletonList(convertMappings(templateMetadata.mappings())),
|
||||
xContentRegistry()
|
||||
);
|
||||
assertThat(mappings, Matchers.hasKey("type"));
|
||||
Map<String, Object> mappings = parseV1Mappings("", Collections.singletonList(templateMetadata.mappings()), xContentRegistry());
|
||||
assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME));
|
||||
}
|
||||
|
||||
public void testParseMappingsWithTypelessTemplate() throws Exception {
|
||||
|
@ -1078,11 +1069,7 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
|
|||
ExceptionsHelper.reThrowIfNotNull(e);
|
||||
}
|
||||
});
|
||||
Map<String, Map<String, Object>> mappings = parseV1Mappings(
|
||||
emptyMap(),
|
||||
Collections.singletonList(convertMappings(templateMetadata.mappings())),
|
||||
xContentRegistry()
|
||||
);
|
||||
Map<String, Object> mappings = parseV1Mappings("", Collections.singletonList(templateMetadata.mappings()), xContentRegistry());
|
||||
assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME));
|
||||
}
|
||||
|
||||
|
@ -1253,7 +1240,7 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase {
|
|||
final String mapping = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject(MapperService.SINGLE_MAPPING_NAME)
|
||||
.startObject("properties")
|
||||
.startObject(fieldName)
|
||||
.field("type", fieldType)
|
||||
|
|
|
@ -970,7 +970,6 @@ public class MetadataIndexTemplateServiceTests extends OpenSearchSingleNodeTestC
|
|||
}
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/57393")
|
||||
public void testResolveConflictingMappings() throws Exception {
|
||||
final MetadataIndexTemplateService service = getMetadataIndexTemplateService();
|
||||
ClusterState state = ClusterState.EMPTY_STATE;
|
||||
|
@ -2066,6 +2065,27 @@ public class MetadataIndexTemplateServiceTests extends OpenSearchSingleNodeTestC
|
|||
service.addIndexTemplateV2(stateWithDSAndTemplate, false, "logs", nonDSTemplate);
|
||||
}
|
||||
|
||||
public void testLegacyNoopUpdate() {
|
||||
ClusterState state = ClusterState.EMPTY_STATE;
|
||||
PutRequest pr = new PutRequest("api", "id");
|
||||
pr.patterns(Arrays.asList("foo", "bar"));
|
||||
if (randomBoolean()) {
|
||||
pr.settings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3).build());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
pr.mappings(Collections.emptyMap());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
pr.aliases(Collections.singleton(new Alias("alias")));
|
||||
}
|
||||
pr.order(randomIntBetween(0, 10));
|
||||
state = MetadataIndexTemplateService.innerPutTemplate(state, pr, new IndexTemplateMetadata.Builder("id"));
|
||||
|
||||
assertNotNull(state.metadata().templates().get("id"));
|
||||
|
||||
assertThat(MetadataIndexTemplateService.innerPutTemplate(state, pr, new IndexTemplateMetadata.Builder("id")), equalTo(state));
|
||||
}
|
||||
|
||||
private static List<Throwable> putTemplate(NamedXContentRegistry xContentRegistry, PutRequest request) {
|
||||
MetadataCreateIndexService createIndexService = new MetadataCreateIndexService(
|
||||
Settings.EMPTY,
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.opensearch.cluster.service.ClusterService;
|
|||
import org.opensearch.common.compress.CompressedXContent;
|
||||
import org.opensearch.index.Index;
|
||||
import org.opensearch.index.IndexService;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.plugins.Plugin;
|
||||
import org.opensearch.test.OpenSearchSingleNodeTestCase;
|
||||
import org.opensearch.test.InternalSettingsPlugin;
|
||||
|
@ -57,8 +58,11 @@ public class MetadataMappingServiceTests extends OpenSearchSingleNodeTestCase {
|
|||
}
|
||||
|
||||
public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Exception {
|
||||
final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
|
||||
final CompressedXContent currentMapping = indexService.mapperService().documentMapper("type").mappingSource();
|
||||
final IndexService indexService = createIndex(
|
||||
"test",
|
||||
client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME)
|
||||
);
|
||||
final CompressedXContent currentMapping = indexService.mapperService().documentMapper().mappingSource();
|
||||
|
||||
final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class);
|
||||
final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
|
||||
|
@ -74,11 +78,11 @@ public class MetadataMappingServiceTests extends OpenSearchSingleNodeTestCase {
|
|||
assertTrue(result.executionResults.values().iterator().next().isSuccess());
|
||||
// the task really was a mapping update
|
||||
assertThat(
|
||||
indexService.mapperService().documentMapper("type").mappingSource(),
|
||||
not(equalTo(result.resultingState.metadata().index("test").getMappings().get("type").source()))
|
||||
indexService.mapperService().documentMapper().mappingSource(),
|
||||
not(equalTo(result.resultingState.metadata().index("test").getMappings().get(MapperService.SINGLE_MAPPING_NAME).source()))
|
||||
);
|
||||
// since we never committed the cluster state update, the in-memory state is unchanged
|
||||
assertThat(indexService.mapperService().documentMapper("type").mappingSource(), equalTo(currentMapping));
|
||||
assertThat(indexService.mapperService().documentMapper().mappingSource(), equalTo(currentMapping));
|
||||
}
|
||||
|
||||
public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception {
|
||||
|
|
|
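The MetadataMappingService test above shows the pattern single-node tests follow now: create the index with the one allowed mapping name and read the mapper back without naming a type. A short sketch under the same assumptions; it would live in an OpenSearchSingleNodeTestCase subclass with the usual IndexService, MapperService, and CompressedXContent imports, and the index name is illustrative:

public void testTypelessMappingAccessSketch() throws Exception {
    IndexService indexService = createIndex(
        "sketch-index",
        client().admin().indices().prepareCreate("sketch-index").addMapping(MapperService.SINGLE_MAPPING_NAME)
    );
    // typeless accessor: documentMapper() no longer takes a type argument
    CompressedXContent mappingSource = indexService.mapperService().documentMapper().mappingSource();
    assertNotNull(mappingSource);
}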
@ -264,11 +264,9 @@ public class ToAndFromJsonMetadataTests extends OpenSearchTestCase {
|
|||
+ Version.CURRENT.id
|
||||
+ "\"\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : [\n"
|
||||
+ " {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " ],\n"
|
||||
+ " \"mappings\" : {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " },\n"
|
||||
+ " \"aliases\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
|
@ -434,11 +432,7 @@ public class ToAndFromJsonMetadataTests extends OpenSearchTestCase {
|
|||
+ " }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : {\n"
|
||||
+ " \"type\" : {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : { },\n"
|
||||
+ " \"aliases\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
|
@ -500,9 +494,7 @@ public class ToAndFromJsonMetadataTests extends OpenSearchTestCase {
|
|||
+ "\"\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : {\n"
|
||||
+ " \"type\" : {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " },\n"
|
||||
+ " \"aliases\" : { }\n"
|
||||
+ " }\n"
|
||||
|
@ -610,11 +602,7 @@ public class ToAndFromJsonMetadataTests extends OpenSearchTestCase {
|
|||
+ " }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : {\n"
|
||||
+ " \"type\" : {\n"
|
||||
+ " \"key1\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"mappings\" : { },\n"
|
||||
+ " \"aliases\" : { }\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
|
|
|
@ -57,7 +57,7 @@ public class AllFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
);
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject());
|
||||
indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
|
||||
assertEquals(mapping, indexService.mapperService().documentMapper("_doc").mapping().toString());
|
||||
assertEquals(mapping, indexService.mapperService().documentMapper().mapping().toString());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -363,7 +363,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
|
|||
ParseContext context = new ParseContext.InternalParseContext(
|
||||
settings,
|
||||
mapperService.documentMapperParser(),
|
||||
mapperService.documentMapper("type"),
|
||||
mapperService.documentMapper(),
|
||||
null,
|
||||
null
|
||||
);
|
||||
|
|
|
@ -93,7 +93,7 @@ public class FieldFilterMapperPluginTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testGetFieldMappings() {
|
||||
GetFieldMappingsResponse getFieldMappingsResponse = client().admin().indices().prepareGetFieldMappings().setFields("*").get();
|
||||
Map<String, Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>>> mappings = getFieldMappingsResponse.mappings();
|
||||
Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> mappings = getFieldMappingsResponse.mappings();
|
||||
assertEquals(2, mappings.size());
|
||||
assertFieldMappings(mappings.get("index1"), ALL_FLAT_FIELDS);
|
||||
assertFieldMappings(mappings.get("filtered"), FILTERED_FLAT_FIELDS);
|
||||
|
@ -107,6 +107,14 @@ public class FieldFilterMapperPluginTests extends OpenSearchSingleNodeTestCase {
|
|||
assertFieldMappings(response.mappings().get("test"), FILTERED_FLAT_FIELDS);
|
||||
}
|
||||
|
||||
public void testGetNonExistentFieldMapping() {
|
||||
GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index1").setFields("non-existent").get();
|
||||
Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> mappings = response.mappings();
|
||||
assertEquals(1, mappings.size());
|
||||
Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fieldmapping = mappings.get("index1");
|
||||
assertEquals(0, fieldmapping.size());
|
||||
}
|
||||
|
||||
public void testFieldCapabilities() {
|
||||
List<String> allFields = new ArrayList<>(ALL_FLAT_FIELDS);
|
||||
allFields.addAll(ALL_OBJECT_FIELDS);
|
||||
|
@ -142,11 +150,10 @@ public class FieldFilterMapperPluginTests extends OpenSearchSingleNodeTestCase {
|
|||
}
|
||||
|
||||
private static void assertFieldMappings(
|
||||
Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> mappings,
|
||||
Map<String, GetFieldMappingsResponse.FieldMappingMetadata> actual,
|
||||
Collection<String> expectedFields
|
||||
) {
|
||||
assertEquals(1, mappings.size());
|
||||
Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fields = new HashMap<>(mappings.get("_doc"));
|
||||
Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fields = new HashMap<>(actual);
|
||||
Set<String> builtInMetadataFields = IndicesModule.getBuiltInMetadataFields();
|
||||
for (String field : builtInMetadataFields) {
|
||||
GetFieldMappingsResponse.FieldMappingMetadata fieldMappingMetadata = fields.remove(field);
|
||||
|
|
|
@ -78,32 +78,6 @@ public class MapperServiceTests extends OpenSearchSingleNodeTestCase {
|
|||
return Arrays.asList(InternalSettingsPlugin.class, ReloadableFilterPlugin.class);
|
||||
}
|
||||
|
||||
public void testTypeNameStartsWithIllegalDot() {
|
||||
String index = "test-index";
|
||||
String type = ".test-type";
|
||||
String field = "field";
|
||||
IllegalArgumentException e = expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> { client().admin().indices().prepareCreate(index).addMapping(type, field, "type=text").execute().actionGet(); }
|
||||
);
|
||||
assertTrue(e.getMessage(), e.getMessage().contains("mapping type name [.test-type] must not start with a '.'"));
|
||||
}
|
||||
|
||||
public void testTypeNameTooLong() {
|
||||
String index = "text-index";
|
||||
String field = "field";
|
||||
String type = new String(new char[256]).replace("\0", "a");
|
||||
|
||||
MapperException e = expectThrows(
|
||||
MapperException.class,
|
||||
() -> { client().admin().indices().prepareCreate(index).addMapping(type, field, "type=text").execute().actionGet(); }
|
||||
);
|
||||
assertTrue(
|
||||
e.getMessage(),
|
||||
e.getMessage().contains("mapping type name [" + type + "] is too long; limit is length 255 but was [256]")
|
||||
);
|
||||
}
|
||||
|
||||
public void testTypeValidation() {
|
||||
InvalidTypeNameException e = expectThrows(InvalidTypeNameException.class, () -> MapperService.validateTypeName("_type"));
|
||||
assertEquals("mapping type name [_type] can't start with '_' unless it is called [_doc]", e.getMessage());
|
||||
|
|
|
@ -63,16 +63,13 @@ public class UpdateMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
// test store, ... all the parameters that are not to be changed just like in other fields
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject(fieldName)
|
||||
.field("enabled", true)
|
||||
.field("store", false)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
XContentBuilder mappingUpdate = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject(fieldName)
|
||||
.field("enabled", true)
|
||||
.field("store", true)
|
||||
|
@ -82,65 +79,73 @@ public class UpdateMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate);
|
||||
}
|
||||
|
||||
protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mapping, XContentBuilder mappingUpdate) throws IOException {
|
||||
IndexService indexService = createIndex("test", Settings.builder().build(), "type", mapping);
|
||||
CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
|
||||
IndexService indexService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping);
|
||||
CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper().mappingSource();
|
||||
// simulate like in MetadataMappingService#putMapping
|
||||
try {
|
||||
indexService.mapperService()
|
||||
.merge("type", new CompressedXContent(BytesReference.bytes(mappingUpdate)), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(BytesReference.bytes(mappingUpdate)),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
);
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
// expected
|
||||
}
|
||||
// make sure simulate flag actually worked - no mappings applied
|
||||
CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource();
|
||||
CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper().mappingSource();
|
||||
assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate));
|
||||
}
|
||||
|
||||
public void testConflictSameType() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "long")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
MapperService mapperService = createIndex("test", Settings.builder().build(), "type", mapping).mapperService();
|
||||
MapperService mapperService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping)
|
||||
.mapperService();
|
||||
|
||||
XContentBuilder update = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "double")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
IllegalArgumentException e = expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(Strings.toString(update)),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
)
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
|
||||
|
||||
e = expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(Strings.toString(update)),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
)
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
|
||||
|
||||
assertThat(
|
||||
((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(),
|
||||
((FieldMapper) mapperService.documentMapper().mapping().root().getMapper("foo")).fieldType().typeName(),
|
||||
equalTo("long")
|
||||
);
|
||||
}
|
||||
|
@ -148,35 +153,36 @@ public class UpdateMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
public void testConflictNewType() throws Exception {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "long")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
MapperService mapperService = createIndex("test", Settings.builder().build(), "type", mapping).mapperService();
|
||||
MapperService mapperService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping)
|
||||
.mapperService();
|
||||
|
||||
XContentBuilder update = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "double")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
IllegalArgumentException e = expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(Strings.toString(update)),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
)
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]"));
|
||||
|
||||
assertThat(
|
||||
((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(),
|
||||
((FieldMapper) mapperService.documentMapper().mapping().root().getMapper("foo")).fieldType().typeName(),
|
||||
equalTo("long")
|
||||
);
|
||||
}
|
||||
|
@ -184,25 +190,31 @@ public class UpdateMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
public void testReuseMetaField() throws IOException {
|
||||
XContentBuilder mapping = XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("_id")
|
||||
.field("type", "text")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
MapperService mapperService = createIndex("test", Settings.builder().build()).mapperService();
|
||||
|
||||
MapperParsingException e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(Strings.toString(mapping)),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
)
|
||||
);
|
||||
assertThat(e.getMessage(), containsString("Field [_id] is defined more than once"));
|
||||
|
||||
MapperParsingException e2 = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService.merge(
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
new CompressedXContent(Strings.toString(mapping)),
|
||||
MapperService.MergeReason.MAPPING_UPDATE
|
||||
)
|
||||
);
|
||||
assertThat(e2.getMessage(), containsString("Field [_id] is defined more than once"));
|
||||
}
|
||||
|
@ -211,47 +223,43 @@ public class UpdateMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
String mapping1 = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "object")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
);
|
||||
String mapping2 = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("foo")
|
||||
.field("type", "long")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
);
|
||||
|
||||
MapperService mapperService1 = createIndex("test1").mapperService();
|
||||
mapperService1.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE);
|
||||
mapperService1.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE);
|
||||
IllegalArgumentException e = expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> mapperService1.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService1.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)
|
||||
);
|
||||
assertThat(e.getMessage(), equalTo("can't merge a non object mapping [foo] with an object mapping"));
|
||||
|
||||
MapperService mapperService2 = createIndex("test2").mapperService();
|
||||
mapperService2.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
|
||||
mapperService2.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
|
||||
e = expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> mapperService2.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE)
|
||||
() -> mapperService2.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE)
|
||||
);
|
||||
assertThat(e.getMessage(), equalTo("can't merge a non object mapping [foo] with an object mapping"));
|
||||
}
|
||||
|
||||
public void testMappingVersion() {
|
||||
createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type"));
|
||||
createIndex("test", client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME));
|
||||
final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
|
||||
{
|
||||
final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion();
|
||||
|
|
|
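Every merge in the tests above now goes through the single mapping name. A minimal sketch of the call shape, assuming a MapperService from an existing index; the wrapper class and method are hypothetical:

import java.io.IOException;
import org.opensearch.common.compress.CompressedXContent;
import org.opensearch.index.mapper.MapperService;

final class TypelessMergeSketch {
    // Apply a mapping update under the one allowed name; no per-type bookkeeping remains.
    static void putTypelessMapping(MapperService mapperService, String mappingJson) throws IOException {
        mapperService.merge(
            MapperService.SINGLE_MAPPING_NAME,
            new CompressedXContent(mappingJson),
            MapperService.MergeReason.MAPPING_UPDATE
        );
    }
}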
@ -50,7 +50,7 @@ public class TypeQueryBuilderTests extends AbstractQueryTestCase<TypeQueryBuilde
|
|||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(TypeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
if (createShardContext().getMapperService().documentMapper(queryBuilder.type()) == null) {
|
||||
if (createShardContext().getMapperService().documentMapper() == null) {
|
||||
assertEquals(new MatchNoDocsQuery(), query);
|
||||
} else {
|
||||
assertThat(query, equalTo(Queries.newNonNestedFilter(context.indexVersionCreated())));
|
||||
|
|
|
@ -79,7 +79,6 @@ import org.opensearch.common.io.stream.BytesStreamOutput;
|
|||
import org.opensearch.common.io.stream.StreamInput;
|
||||
import org.opensearch.common.lease.Releasable;
|
||||
import org.opensearch.common.lease.Releasables;
|
||||
import org.opensearch.common.lucene.uid.Versions;
|
||||
import org.opensearch.common.settings.IndexScopedSettings;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
import org.opensearch.common.unit.TimeValue;
|
||||
|
@ -92,12 +91,10 @@ import org.opensearch.common.xcontent.XContentFactory;
|
|||
import org.opensearch.common.xcontent.XContentType;
|
||||
import org.opensearch.env.NodeEnvironment;
|
||||
import org.opensearch.index.IndexSettings;
|
||||
import org.opensearch.index.VersionType;
|
||||
import org.opensearch.index.codec.CodecService;
|
||||
import org.opensearch.index.engine.CommitStats;
|
||||
import org.opensearch.index.engine.DocIdSeqNoAndSource;
|
||||
import org.opensearch.index.engine.Engine;
|
||||
import org.opensearch.index.engine.Engine.DeleteResult;
|
||||
import org.opensearch.index.engine.EngineConfig;
|
||||
import org.opensearch.index.engine.EngineConfigFactory;
|
||||
import org.opensearch.index.engine.EngineTestCase;
|
||||
|
@ -3358,11 +3355,7 @@ public class IndexShardTests extends IndexShardTestCase {
|
|||
|
||||
// Do some updates and deletes, then recheck the correlation again.
|
||||
for (int i = 0; i < numDoc / 2; i++) {
|
||||
if (randomBoolean()) {
|
||||
deleteDoc(indexShard, "doc", Integer.toString(i));
|
||||
} else {
|
||||
indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
|
||||
}
|
||||
indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
indexShard.flush(new FlushRequest());
|
||||
|
@ -3939,7 +3932,10 @@ public class IndexShardTests extends IndexShardTestCase {
|
|||
public void testSupplyTombstoneDoc() throws Exception {
|
||||
IndexShard shard = newStartedShard();
|
||||
String id = randomRealisticUnicodeOfLengthBetween(1, 10);
|
||||
ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc("doc", id);
|
||||
ParsedDocument deleteTombstone = shard.getEngine()
|
||||
.config()
|
||||
.getTombstoneDocSupplier()
|
||||
.newDeleteTombstoneDoc(MapperService.SINGLE_MAPPING_NAME, id);
|
||||
assertThat(deleteTombstone.docs(), hasSize(1));
|
||||
ParseContext.Document deleteDoc = deleteTombstone.docs().get(0);
|
||||
assertThat(
|
||||
|
@ -4294,38 +4290,6 @@ public class IndexShardTests extends IndexShardTestCase {
|
|||
return Settings.builder().put(super.threadPoolSettings()).put("thread_pool.estimated_time_interval", "5ms").build();
|
||||
}
|
||||
|
||||
public void testTypelessDelete() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
|
||||
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.build();
|
||||
IndexMetadata metadata = IndexMetadata.builder("index")
|
||||
.putMapping("some_type", "{ \"properties\": {}}")
|
||||
.settings(settings)
|
||||
.primaryTerm(0, 1)
|
||||
.build();
|
||||
IndexShard shard = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, null);
|
||||
recoverShardFromStore(shard);
|
||||
Engine.IndexResult indexResult = indexDoc(shard, "some_type", "id", "{}");
|
||||
assertTrue(indexResult.isCreated());
|
||||
|
||||
DeleteResult deleteResult = shard.applyDeleteOperationOnPrimary(
|
||||
Versions.MATCH_ANY,
|
||||
"some_other_type",
|
||||
"id",
|
||||
VersionType.INTERNAL,
|
||||
UNASSIGNED_SEQ_NO,
|
||||
1
|
||||
);
|
||||
assertFalse(deleteResult.isFound());
|
||||
|
||||
deleteResult = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, "_doc", "id", VersionType.INTERNAL, UNASSIGNED_SEQ_NO, 1);
|
||||
assertTrue(deleteResult.isFound());
|
||||
|
||||
closeShards(shard);
|
||||
}
|
||||
|
||||
public void testTypelessGet() throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
|
|
|
@ -64,7 +64,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testIndexingWithNoContexts() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder().startObject()
|
||||
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("completion")
|
||||
.field("type", "completion")
|
||||
|
@ -76,16 +75,15 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService();
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService();
|
||||
MappedFieldType completionFieldType = mapperService.fieldType("completion");
|
||||
ParsedDocument parsedDocument = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -114,7 +112,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testIndexingWithSimpleContexts() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder().startObject()
|
||||
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("completion")
|
||||
.field("type", "completion")
|
||||
|
@ -126,16 +123,15 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService();
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService();
|
||||
MappedFieldType completionFieldType = mapperService.fieldType("completion");
|
||||
ParsedDocument parsedDocument = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -162,7 +158,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testIndexingWithContextList() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder().startObject()
|
||||
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("completion")
|
||||
.field("type", "completion")
|
||||
|
@ -174,16 +169,15 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService();
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService();
|
||||
MappedFieldType completionFieldType = mapperService.fieldType("completion");
|
||||
ParsedDocument parsedDocument = mapperService.documentMapper()
|
||||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -214,7 +208,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
public void testIndexingWithMultipleContexts() throws Exception {
|
||||
XContentBuilder mapping = jsonBuilder().startObject()
|
||||
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("completion")
|
||||
.field("type", "completion")
|
||||
|
@ -230,10 +223,9 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject();
|
||||
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService();
|
||||
MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService();
|
||||
MappedFieldType completionFieldType = mapperService.fieldType("completion");
|
||||
XContentBuilder builder = jsonBuilder().startObject()
|
||||
.startArray("completion")
|
||||
|
@ -248,7 +240,7 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject();
|
||||
ParsedDocument parsedDocument = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON));
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(builder), XContentType.JSON));
|
||||
IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
|
||||
assertContextSuggestFields(fields, 3);
|
||||
}
|
||||
|
|