[Remove] types from translog (#2439)

Removes persisting the type in the translog since types are no longer supported.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>

Parent: 6f12fa19e8
Commit: 95d4750249
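The diff below touches many call sites, but the shape of the change is the same everywhere: constructors and methods that used to carry a document type drop that argument. A minimal before/after sketch (illustrative only, not a line from the commit; `index`, `id`, `source`, `xContentType`, and `mapperService` are assumed to be in scope):

    // Before: callers threaded the mapping type through SourceToParse.
    SourceToParse typed = new SourceToParse(index, "_doc", id, source, xContentType);

    // After: the type parameter is gone; only index, id, source, and content type remain.
    SourceToParse typeless = new SourceToParse(index, id, source, xContentType);
    ParsedDocument doc = mapperService.documentMapper().parse(typeless);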
@@ -593,10 +593,9 @@ public class PainlessExecuteAction extends ActionType<PainlessExecuteAction.Resp
         try (Directory directory = new ByteBuffersDirectory()) {
             try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(defaultAnalyzer))) {
                 String index = indexService.index().getName();
-                String type = indexService.mapperService().documentMapper().type();
                 BytesReference document = request.contextSetup.document;
                 XContentType xContentType = request.contextSetup.xContentType;
-                SourceToParse sourceToParse = new SourceToParse(index, type, "_id", document, xContentType);
+                SourceToParse sourceToParse = new SourceToParse(index, "_id", document, xContentType);
                 ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse);
                 indexWriter.addDocuments(parsedDocument.docs());
                 try (IndexReader indexReader = DirectoryReader.open(indexWriter)) {

@@ -91,7 +91,7 @@ public class RankFeatureMetaFieldMapperTests extends OpenSearchSingleNodeTestCas
         BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
         MapperParsingException e = expectThrows(
             MapperParsingException.class,
-            () -> mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON))
+            () -> mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON))
         );
         assertTrue(
             e.getCause().getMessage().contains("Field [" + rfMetaField + "] is a metadata field and cannot be added inside a document.")

@@ -134,7 +134,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()),
                 XContentType.JSON
@@ -156,7 +155,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()),
                 XContentType.JSON
@@ -178,7 +176,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()),
                 XContentType.JSON
@@ -202,7 +199,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()),
                 XContentType.JSON
@@ -222,7 +218,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ThrowingRunnable runnable = () -> mapper2.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()),
                 XContentType.JSON
@@ -246,7 +241,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ThrowingRunnable runnable = () -> mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()),
                 XContentType.JSON
@@ -261,7 +255,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ParsedDocument doc = mapper2.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()),
                 XContentType.JSON
@@ -277,7 +270,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
                 XContentType.JSON
@@ -291,7 +283,6 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
                 XContentType.JSON

@@ -80,13 +80,7 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {

         // Doc without join
         ParsedDocument doc = docMapper.parse(
-            new SourceToParse(
-                "test",
-                "type",
-                "0",
-                BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
-                XContentType.JSON
-            )
+            new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)
         );
         assertNull(doc.rootDoc().getBinaryValue("join_field"));

@@ -94,7 +88,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()),
                 XContentType.JSON
@@ -107,7 +100,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -131,7 +123,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()),
                     XContentType.JSON
@@ -161,7 +152,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -181,7 +171,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -222,13 +211,7 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {

         // Doc without join
         ParsedDocument doc = docMapper.parse(
-            new SourceToParse(
-                "test",
-                "type",
-                "0",
-                BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
-                XContentType.JSON
-            )
+            new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)
         );
         assertNull(doc.rootDoc().getBinaryValue("join_field"));

@@ -236,7 +219,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()),
                 XContentType.JSON
@@ -249,7 +231,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -274,7 +255,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "2",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "child").endObject()),
                     XContentType.JSON,
@@ -290,7 +270,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "2",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
@@ -311,7 +290,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "3",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -335,7 +313,6 @@ public class ParentJoinFieldMapperTests extends OpenSearchSingleNodeTestCase {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()),
                     XContentType.JSON

@@ -586,7 +586,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
         }
         docMapper = mapperService.documentMapper();
         for (BytesReference document : documents) {
-            docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType)));
+            docs.add(docMapper.parse(new SourceToParse(context.index().getName(), "_temp_id", document, documentXContentType)));
         }

         FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer();

@@ -553,7 +553,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -574,7 +573,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -592,7 +590,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -621,7 +618,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, query).endObject()),
                     XContentType.JSON
@@ -640,7 +636,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -665,7 +660,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder().startObject().field(fieldName, termQuery("unmapped_field", "value")).endObject()
@@ -684,7 +678,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
                     XContentType.JSON
@@ -697,7 +690,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject()),
                     XContentType.JSON
@@ -760,7 +752,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    typeName,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject().field("query_field1", queryBuilder).field("query_field2", queryBuilder).endObject()
@@ -803,7 +794,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    typeName,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject().startObject("object_field").field("query_field", queryBuilder).endObject().endObject()
@@ -823,7 +813,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    typeName,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject()
@@ -846,7 +835,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    typeName,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject()
@@ -954,7 +942,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
@@ -1002,7 +989,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
@@ -1097,7 +1083,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
                     XContentType.JSON
@@ -1123,7 +1108,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
                     XContentType.JSON
@@ -1152,7 +1136,6 @@ public class PercolatorFieldMapperTests extends OpenSearchSingleNodeTestCase {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
                     XContentType.JSON

@@ -342,16 +342,16 @@ public class ReindexDocumentationIT extends OpenSearchIntegTestCase {

             @Override
             public Engine.Index preIndex(ShardId shardId, Engine.Index index) {
-                return preCheck(index, index.type());
+                return preCheck(index);
             }

             @Override
             public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
-                return preCheck(delete, delete.type());
+                return preCheck(delete);
             }

-            private <T extends Engine.Operation> T preCheck(T operation, String type) {
-                if (("_doc".equals(type) == false) || (operation.origin() != Engine.Operation.Origin.PRIMARY)) {
+            private <T extends Engine.Operation> T preCheck(T operation) {
+                if ((operation.origin() != Engine.Operation.Origin.PRIMARY)) {
                     return operation;
                 }

@@ -66,7 +66,7 @@ public class SizeMappingTests extends OpenSearchSingleNodeTestCase {
         DocumentMapper docMapper = service.mapperService().documentMapper();

         BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
-        ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON));

         boolean stored = false;
         boolean points = false;
@@ -83,7 +83,7 @@ public class SizeMappingTests extends OpenSearchSingleNodeTestCase {
         DocumentMapper docMapper = service.mapperService().documentMapper();

         BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
-        ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON));

         assertThat(doc.rootDoc().getField("_size"), nullValue());
     }
@@ -93,7 +93,7 @@ public class SizeMappingTests extends OpenSearchSingleNodeTestCase {
         DocumentMapper docMapper = service.mapperService().documentMapper();

         BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
-        ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON));

         assertThat(doc.rootDoc().getField("_size"), nullValue());
     }

@@ -14,5 +14,7 @@
       indices.get_field_mapping:
         index: test_index
         fields: not_existent
+        ignore: 404 # ignore 404 failures for now
+        # see: https://github.com/opensearch-project/OpenSearch/issues/2440

   - match: { 'test_index.mappings': {}}

@@ -81,14 +81,12 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {

     public void testNoSuchDoc() throws Exception {
         XContentBuilder mapping = jsonBuilder().startObject()
-            .startObject("type1")
             .startObject("properties")
             .startObject("field")
             .field("type", "text")
             .field("term_vector", "with_positions_offsets_payloads")
             .endObject()
             .endObject()
-            .endObject()
             .endObject();
         assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping));

@@ -375,7 +375,7 @@ public class IndexShardIT extends OpenSearchSingleNodeTestCase {
         shard.applyIndexOperationOnPrimary(
             Versions.MATCH_ANY,
             VersionType.INTERNAL,
-            new SourceToParse("test", "_doc", "1", new BytesArray("{}"), XContentType.JSON),
+            new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON),
             SequenceNumbers.UNASSIGNED_SEQ_NO,
             0,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
@@ -460,7 +460,7 @@ public class IndexShardIT extends OpenSearchSingleNodeTestCase {
         final Engine.IndexResult result = shard.applyIndexOperationOnPrimary(
             Versions.MATCH_ANY,
             VersionType.INTERNAL,
-            new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray("{}"), XContentType.JSON),
+            new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON),
             SequenceNumbers.UNASSIGNED_SEQ_NO,
             0,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,

@@ -340,7 +340,6 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
             final DeleteRequest request = context.getRequestToExecute();
             result = primary.applyDeleteOperationOnPrimary(
                 version,
-                MapperService.SINGLE_MAPPING_NAME,
                 request.id(),
                 request.versionType(),
                 request.ifSeqNo(),
@@ -351,14 +350,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
             result = primary.applyIndexOperationOnPrimary(
                 version,
                 request.versionType(),
-                new SourceToParse(
-                    request.index(),
-                    MapperService.SINGLE_MAPPING_NAME,
-                    request.id(),
-                    request.source(),
-                    request.getContentType(),
-                    request.routing()
-                ),
+                new SourceToParse(request.index(), request.id(), request.source(), request.getContentType(), request.routing()),
                 request.ifSeqNo(),
                 request.ifPrimaryTerm(),
                 request.getAutoGeneratedTimestamp(),
@@ -601,7 +593,6 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
         final ShardId shardId = replica.shardId();
         final SourceToParse sourceToParse = new SourceToParse(
             shardId.getIndexName(),
-            MapperService.SINGLE_MAPPING_NAME,
             indexRequest.id(),
             indexRequest.source(),
             indexRequest.getContentType(),
@@ -622,7 +613,6 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
                     primaryResponse.getSeqNo(),
                     primaryResponse.getPrimaryTerm(),
                     primaryResponse.getVersion(),
-                    MapperService.SINGLE_MAPPING_NAME,
                     deleteRequest.id()
                 );
                 break;

@@ -67,6 +67,7 @@ import org.opensearch.common.xcontent.XContentHelper;
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.gateway.MetadataStateFormat;
 import org.opensearch.index.Index;
+import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.seqno.SequenceNumbers;
 import org.opensearch.index.shard.ShardId;
 import org.opensearch.rest.RestStatus;
@@ -1159,12 +1160,17 @@ public class IndexMetadata implements Diffable<IndexMetadata>, ToXContentFragmen
             return this;
         }

-        public MappingMetadata mapping(String type) {
-            return mappings.get(type);
+        public MappingMetadata mapping() {
+            return mappings.get(MapperService.SINGLE_MAPPING_NAME);
         }

-        public Builder putMapping(String type, String source) throws IOException {
-            putMapping(new MappingMetadata(type, XContentHelper.convertToMap(XContentFactory.xContent(source), source, true)));
+        public Builder putMapping(String source) throws IOException {
+            putMapping(
+                new MappingMetadata(
+                    MapperService.SINGLE_MAPPING_NAME,
+                    XContentHelper.convertToMap(XContentFactory.xContent(source), source, true)
+                )
+            );
             return this;
         }

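With the builder API above, callers no longer name a mapping type. A rough usage sketch (the mapping JSON is illustrative and `builder` is an IndexMetadata.Builder assumed to be in scope):

    // Before: the type accompanied the mapping source.
    builder.putMapping("_doc", "{\"properties\":{\"field\":{\"type\":\"keyword\"}}}");

    // After: only the source is passed; the builder registers it under MapperService.SINGLE_MAPPING_NAME.
    builder.putMapping("{\"properties\":{\"field\":{\"type\":\"keyword\"}}}");
    MappingMetadata mapping = builder.mapping(); // typeless accessor added above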
@@ -185,22 +185,11 @@ public class MetadataMappingService {
             boolean dirty = false;
             String index = indexService.index().getName();
             try {
-                List<String> updatedTypes = new ArrayList<>();
                 MapperService mapperService = indexService.mapperService();
                 DocumentMapper mapper = mapperService.documentMapper();
-                if (mapper != null) {
-                    final String type = mapper.type();
-                    if (!mapper.mappingSource().equals(builder.mapping(type).source())) {
-                        updatedTypes.add(type);
-                    }
-                }
-
-                // if a single type is not up-to-date, re-send everything
-                if (updatedTypes.isEmpty() == false) {
-                    logger.warn("[{}] re-syncing mappings with cluster state because of types [{}]", index, updatedTypes);
-                    dirty = true;
-                    if (mapper != null) {
+                if (mapper != null) {
                     builder.putMapping(new MappingMetadata(mapper));
+                    if (mapper.mappingSource().equals(builder.mapping().source()) == false) {
+                        dirty = true;
                     }
                 }
             } catch (Exception e) {

@@ -226,7 +226,6 @@ public final class IndexingSlowLog implements IndexingOperationListener {
             map.put("message", index);
             map.put("took", TimeValue.timeValueNanos(tookInNanos));
             map.put("took_millis", "" + TimeUnit.NANOSECONDS.toMillis(tookInNanos));
-            map.put("doc_type", doc.type());
             map.put("id", doc.id());
             map.put("routing", doc.routing());

@@ -258,7 +257,6 @@ public final class IndexingSlowLog implements IndexingOperationListener {
             sb.append(index).append(" ");
             sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], ");
             sb.append("took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ");
-            sb.append("type[").append(doc.type()).append("], ");
             sb.append("id[").append(doc.id()).append("], ");
             if (doc.routing() == null) {
                 sb.append("routing[]");

@@ -1389,8 +1389,6 @@ public abstract class Engine implements Closeable {
             return this.startTime;
         }

-        public abstract String type();
-
         abstract String id();

         public abstract TYPE operationType();
@@ -1456,11 +1454,6 @@ public abstract class Engine implements Closeable {
             return this.doc;
         }

-        @Override
-        public String type() {
-            return this.doc.type();
-        }
-
         @Override
         public String id() {
             return this.doc.id();
@@ -1485,7 +1478,7 @@ public abstract class Engine implements Closeable {

         @Override
         public int estimatedSizeInBytes() {
-            return (id().length() + type().length()) * 2 + source().length() + 12;
+            return id().length() * 2 + source().length() + 12;
         }

         /**
@@ -1516,13 +1509,11 @@ public abstract class Engine implements Closeable {

     public static class Delete extends Operation {

-        private final String type;
         private final String id;
         private final long ifSeqNo;
         private final long ifPrimaryTerm;

         public Delete(
-            String type,
             String id,
             Term uid,
             long seqNo,
@@ -1540,15 +1531,13 @@ public abstract class Engine implements Closeable {
             assert ifSeqNo == UNASSIGNED_SEQ_NO || ifSeqNo >= 0 : "ifSeqNo [" + ifSeqNo + "] must be non negative or unset";
             assert (origin == Origin.PRIMARY) || (ifSeqNo == UNASSIGNED_SEQ_NO && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM)
                 : "cas operations are only allowed if origin is primary. get [" + origin + "]";
-            this.type = Objects.requireNonNull(type);
             this.id = Objects.requireNonNull(id);
             this.ifSeqNo = ifSeqNo;
             this.ifPrimaryTerm = ifPrimaryTerm;
         }

-        public Delete(String type, String id, Term uid, long primaryTerm) {
+        public Delete(String id, Term uid, long primaryTerm) {
             this(
-                type,
                 id,
                 uid,
                 UNASSIGNED_SEQ_NO,
@@ -1564,7 +1553,6 @@ public abstract class Engine implements Closeable {

         public Delete(Delete template, VersionType versionType) {
             this(
-                template.type(),
                 template.id(),
                 template.uid(),
                 template.seqNo(),
@@ -1578,11 +1566,6 @@ public abstract class Engine implements Closeable {
             );
         }

-        @Override
-        public String type() {
-            return this.type;
-        }
-
         @Override
         public String id() {
             return this.id;
@@ -1625,11 +1608,6 @@ public abstract class Engine implements Closeable {
             throw new UnsupportedOperationException();
         }

-        @Override
-        public String type() {
-            throw new UnsupportedOperationException();
-        }
-
         @Override
         public long version() {
             throw new UnsupportedOperationException();

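The trimmed Delete constructor means engine-level deletes are now keyed purely by _id. A small sketch of constructing one with the shortened utility constructor shown above (the "1" id is illustrative and `primaryTerm` is assumed to be in scope):

    Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId("1"));
    Engine.Delete delete = new Engine.Delete("1", uid, primaryTerm); // no type argument anymore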
@@ -466,7 +466,7 @@ public final class EngineConfig {
         /**
          * Creates a tombstone document for a delete operation.
          */
-        ParsedDocument newDeleteTombstoneDoc(String type, String id);
+        ParsedDocument newDeleteTombstoneDoc(String id);

         /**
          * Creates a tombstone document for a noop operation.

@@ -1376,15 +1376,13 @@ public class InternalEngine extends Engine {
         final VersionValue versionValue = versionMap.getVersionForAssert(index.uid().bytes());
         if (versionValue != null) {
             if (versionValue.isDelete() == false || allowDeleted == false) {
-                throw new AssertionError(
-                    "doc [" + index.type() + "][" + index.id() + "] exists in version map (version " + versionValue + ")"
-                );
+                throw new AssertionError("doc [" + index.id() + "] exists in version map (version " + versionValue + ")");
             }
         } else {
             try (Searcher searcher = acquireSearcher("assert doc doesn't exist", SearcherScope.INTERNAL)) {
                 final long docsWithId = searcher.count(new TermQuery(index.uid()));
                 if (docsWithId > 0) {
-                    throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists [" + docsWithId + "] times in index");
+                    throw new AssertionError("doc [" + index.id() + "] exists [" + docsWithId + "] times in index");
                 }
             }
         }
@@ -1420,7 +1418,6 @@ public class InternalEngine extends Engine {
         // generate or register sequence number
         if (delete.origin() == Operation.Origin.PRIMARY) {
             delete = new Delete(
-                delete.type(),
                 delete.id(),
                 delete.uid(),
                 generateSeqNoForOperationOnPrimary(delete),
@@ -1608,7 +1605,7 @@ public class InternalEngine extends Engine {
     private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws IOException {
         assert assertMaxSeqNoOfUpdatesIsAdvanced(delete.uid(), delete.seqNo(), false, false);
         try {
-            final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.type(), delete.id());
+            final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.id());
             assert tombstone.docs().size() == 1 : "Tombstone doc should have single doc [" + tombstone + "]";
             tombstone.updateSeqID(delete.seqNo(), delete.primaryTerm());
             tombstone.version().setLongValue(plan.versionOfDeletion);

@@ -288,10 +288,9 @@ final class LuceneChangesSnapshot implements Translog.Snapshot {
             assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]";
         } else {
             final String id = fields.uid().id();
-            final String type = fields.uid().type();
             final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
             if (isTombstone) {
-                op = new Translog.Delete(type, id, uid, seqNo, primaryTerm, version);
+                op = new Translog.Delete(id, uid, seqNo, primaryTerm, version);
                 assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]";
             } else {
                 final BytesReference source = fields.source();
@@ -310,7 +309,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot {
                 // TODO: pass the latest timestamp from engine.
                 final long autoGeneratedIdTimestamp = -1;
                 op = new Translog.Index(
-                    type,
                     id,
                     seqNo,
                     primaryTerm,

@@ -295,7 +295,6 @@ public final class ShardGetService extends AbstractIndexShardComponent {
             assert source != null : "original source in translog must exist";
             SourceToParse sourceToParse = new SourceToParse(
                 shardId.getIndexName(),
-                MapperService.SINGLE_MAPPING_NAME,
                 id,
                 source,
                 XContentHelper.xContentType(source),

@@ -252,14 +252,14 @@ public class DocumentMapper implements ToXContentFragment {
         return documentParser.parseDocument(source, mapping.metadataMappers);
     }

-    public ParsedDocument createDeleteTombstoneDoc(String index, String type, String id) throws MapperParsingException {
-        final SourceToParse emptySource = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON);
+    public ParsedDocument createDeleteTombstoneDoc(String index, String id) throws MapperParsingException {
+        final SourceToParse emptySource = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON);
         return documentParser.parseDocument(emptySource, deleteTombstoneMetadataFieldMappers).toTombstone();
     }

     public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException {
         final String id = ""; // _id won't be used.
-        final SourceToParse sourceToParse = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON);
+        final SourceToParse sourceToParse = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON);
         final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers).toTombstone();
         // Store the reason of a noop as a raw string in the _source field
         final BytesRef byteRef = new BytesRef(reason);

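Correspondingly, delete tombstones are now created from an index name and id only. A sketch of the new call (assuming a DocumentMapper and a shardId are in scope, mirroring how IndexShard wires this up later in the diff):

    // The "1" id is illustrative; the tombstone is parsed from an empty {} source.
    ParsedDocument tombstone = documentMapper.createDeleteTombstoneDoc(shardId.getIndexName(), "1");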
@@ -53,7 +53,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Objects;

 import static org.opensearch.index.mapper.FieldMapper.IGNORE_MALFORMED_SETTING;

@@ -71,8 +70,6 @@ final class DocumentParser {
     }

     ParsedDocument parseDocument(SourceToParse source, MetadataFieldMapper[] metadataFieldsMappers) throws MapperParsingException {
-        validateType(source);
-
         final Mapping mapping = docMapper.mapping();
         final ParseContext.InternalParseContext context;
         final XContentType xContentType = source.getXContentType();
@@ -140,17 +137,6 @@ final class DocumentParser {
         }
     }

-    private void validateType(SourceToParse source) {
-        if (Objects.equals(source.type(), docMapper.type()) == false && MapperService.SINGLE_MAPPING_NAME.equals(source.type()) == false) { // used by typeless APIs
-            throw new MapperParsingException(
-                "Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]"
-            );
-        }
-    }
-
     private static void validateStart(XContentParser parser) throws IOException {
         // will result in START_OBJECT
         XContentParser.Token token = parser.nextToken();
@@ -189,7 +175,6 @@ final class DocumentParser {
             context.version(),
             context.seqID(),
             context.sourceToParse().id(),
-            context.sourceToParse().type(),
             source.routing(),
             context.docs(),
             context.sourceToParse().source(),

@@ -47,7 +47,7 @@ public class ParsedDocument {

     private final Field version;

-    private final String id, type;
+    private final String id;
     private final SeqNoFieldMapper.SequenceIDFields seqID;

     private final String routing;
@@ -63,7 +63,6 @@ public class ParsedDocument {
         Field version,
         SeqNoFieldMapper.SequenceIDFields seqID,
         String id,
-        String type,
         String routing,
         List<Document> documents,
         BytesReference source,
@@ -73,7 +72,6 @@ public class ParsedDocument {
         this.version = version;
         this.seqID = seqID;
         this.id = id;
-        this.type = type;
         this.routing = routing;
         this.documents = documents;
         this.source = source;
@@ -85,10 +83,6 @@ public class ParsedDocument {
         return this.id;
     }

-    public String type() {
-        return this.type;
-    }
-
     public Field version() {
         return version;
     }

@@ -45,17 +45,14 @@ public class SourceToParse {

     private final String index;

-    private final String type;
-
     private final String id;

     private final @Nullable String routing;

     private final XContentType xContentType;

-    public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType, @Nullable String routing) {
+    public SourceToParse(String index, String id, BytesReference source, XContentType xContentType, @Nullable String routing) {
         this.index = Objects.requireNonNull(index);
-        this.type = Objects.requireNonNull(type);
         this.id = Objects.requireNonNull(id);
         // we always convert back to byte array, since we store it and Field only supports bytes..
         // so, we might as well do it here, and improve the performance of working with direct byte arrays
@@ -64,8 +61,8 @@ public class SourceToParse {
         this.routing = routing;
     }

-    public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType) {
-        this(index, type, id, source, xContentType, null);
+    public SourceToParse(String index, String id, BytesReference source, XContentType xContentType) {
+        this(index, id, source, xContentType, null);
     }

     public BytesReference source() {
@@ -76,10 +73,6 @@ public class SourceToParse {
         return this.index;
     }

-    public String type() {
-        return this.type;
-    }
-
     public String id() {
         return this.id;
     }

@@ -186,7 +186,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
         if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
             return;
         }
-        context.doc().add(new Field(fieldType().name(), context.sourceToParse().type(), fieldType));
+        context.doc().add(new Field(fieldType().name(), MapperService.SINGLE_MAPPING_NAME, fieldType));
         if (fieldType().hasDocValues()) {
            context.doc().add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(MapperService.SINGLE_MAPPING_NAME)));
         }

@@ -122,7 +122,6 @@ import org.opensearch.index.get.ShardGetService;
 import org.opensearch.index.mapper.DocumentMapper;
 import org.opensearch.index.mapper.DocumentMapperForType;
 import org.opensearch.index.mapper.IdFieldMapper;
-import org.opensearch.index.mapper.MapperParsingException;
 import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.mapper.Mapping;
 import org.opensearch.index.mapper.ParsedDocument;
@@ -154,7 +153,6 @@ import org.opensearch.index.warmer.ShardIndexWarmerService;
 import org.opensearch.index.warmer.WarmerStats;
 import org.opensearch.indices.IndexingMemoryController;
 import org.opensearch.indices.IndicesService;
-import org.opensearch.indices.TypeMissingException;
 import org.opensearch.indices.breaker.CircuitBreakerService;
 import org.opensearch.indices.cluster.IndicesClusterStateService;
 import org.opensearch.indices.recovery.PeerRecoveryTargetService;
@@ -867,23 +865,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         ensureWriteAllowed(origin);
         Engine.Index operation;
         try {
-            final String resolvedType = mapperService.resolveDocumentType(sourceToParse.type());
-            final SourceToParse sourceWithResolvedType;
-            if (resolvedType.equals(sourceToParse.type())) {
-                sourceWithResolvedType = sourceToParse;
-            } else {
-                sourceWithResolvedType = new SourceToParse(
-                    sourceToParse.index(),
-                    resolvedType,
-                    sourceToParse.id(),
-                    sourceToParse.source(),
-                    sourceToParse.getXContentType(),
-                    sourceToParse.routing()
-                );
-            }
             operation = prepareIndex(
                 docMapper(),
-                sourceWithResolvedType,
+                sourceToParse,
                 seqNo,
                 opPrimaryTerm,
                 version,
@@ -953,8 +937,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
             if (logger.isTraceEnabled()) {
                 // don't use index.source().utf8ToString() here source might not be valid UTF-8
                 logger.trace(
-                    "index [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
-                    index.type(),
+                    "index [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
                     index.id(),
                     index.seqNo(),
                     routingEntry().allocationId(),
@@ -966,9 +949,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
             result = engine.index(index);
             if (logger.isTraceEnabled()) {
                 logger.trace(
-                    "index-done [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] "
+                    "index-done [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] "
                         + "result-seq# [{}] result-term [{}] failure [{}]",
-                    index.type(),
                     index.id(),
                     index.seqNo(),
                     routingEntry().allocationId(),
@@ -984,8 +966,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
             if (logger.isTraceEnabled()) {
                 logger.trace(
                     new ParameterizedMessage(
-                        "index-fail [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
-                        index.type(),
+                        "index-fail [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
                         index.id(),
                         index.seqNo(),
                         routingEntry().allocationId(),
@@ -1038,7 +1019,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl

     public Engine.DeleteResult applyDeleteOperationOnPrimary(
         long version,
-        String type,
         String id,
         VersionType versionType,
         long ifSeqNo,
@@ -1050,7 +1030,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
             UNASSIGNED_SEQ_NO,
             getOperationPrimaryTerm(),
             version,
-            type,
             id,
             versionType,
             ifSeqNo,
@@ -1059,14 +1038,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         );
     }

-    public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long opPrimaryTerm, long version, String type, String id)
-        throws IOException {
+    public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long opPrimaryTerm, long version, String id) throws IOException {
         return applyDeleteOperation(
             getEngine(),
             seqNo,
             opPrimaryTerm,
             version,
-            type,
             id,
             null,
             UNASSIGNED_SEQ_NO,
@@ -1080,7 +1057,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         long seqNo,
         long opPrimaryTerm,
         long version,
-        String type,
         String id,
         @Nullable VersionType versionType,
         long ifSeqNo,
@@ -1093,52 +1069,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
             + getOperationPrimaryTerm()
             + "]";
         ensureWriteAllowed(origin);
-        // When there is a single type, the unique identifier is only composed of the _id,
-        // so there is no way to differentiate foo#1 from bar#1. This is especially an issue
-        // if a user first deletes foo#1 and then indexes bar#1: since we do not encode the
-        // _type in the uid it might look like we are reindexing the same document, which
-        // would fail if bar#1 is indexed with a lower version than foo#1 was deleted with.
-        // In order to work around this issue, we make deletions create types. This way, we
-        // fail if index and delete operations do not use the same type.
-        // TODO: clean this up when types are gone
-        try {
-            Mapping update = docMapper().getMapping();
-            if (update != null) {
-                return new Engine.DeleteResult(update);
-            }
-        } catch (MapperParsingException | IllegalArgumentException | TypeMissingException e) {
-            return new Engine.DeleteResult(e, version, getOperationPrimaryTerm(), seqNo, false);
-        }
-        if (mapperService.resolveDocumentType(type).equals(mapperService.documentMapper().type()) == false) {
-            // We should never get there due to the fact that we generate mapping updates on deletes,
-            // but we still prefer to have a hard exception here as we would otherwise delete a
-            // document in the wrong type.
-            throw new IllegalStateException(
-                "Deleting document from type ["
-                    + mapperService.resolveDocumentType(type)
-                    + "] while current type is ["
-                    + mapperService.documentMapper().type()
-                    + "]"
-            );
-        }
         final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
-        final Engine.Delete delete = prepareDelete(
-            type,
-            id,
-            uid,
-            seqNo,
-            opPrimaryTerm,
-            version,
-            versionType,
-            origin,
-            ifSeqNo,
-            ifPrimaryTerm
-        );
+        final Engine.Delete delete = prepareDelete(id, uid, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm);
         return delete(engine, delete);
     }

     private Engine.Delete prepareDelete(
-        String type,
         String id,
         Term uid,
         long seqNo,
@@ -1150,19 +1086,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         long ifPrimaryTerm
     ) {
         long startTime = System.nanoTime();
-        return new Engine.Delete(
-            mapperService.resolveDocumentType(type),
-            id,
-            uid,
-            seqNo,
-            primaryTerm,
-            version,
-            versionType,
-            origin,
-            startTime,
-            ifSeqNo,
-            ifPrimaryTerm
-        );
+        return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm);
     }

     private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException {
@@ -1813,7 +1737,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
                     origin,
                     new SourceToParse(
                         shardId.getIndexName(),
-                        index.type(),
                         index.id(),
                         index.source(),
                         XContentHelper.xContentType(index.source()),
@@ -1828,7 +1751,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
                     delete.seqNo(),
                     delete.primaryTerm(),
                     delete.version(),
-                    delete.type(),
                     delete.id(),
                     versionType,
                     UNASSIGNED_SEQ_NO,
@@ -3873,8 +3795,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
             : null;
         return new EngineConfig.TombstoneDocSupplier() {
             @Override
-            public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
-                return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id);
+            public ParsedDocument newDeleteTombstoneDoc(String id) {
+                return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), id);
             }

             @Override

@@ -391,7 +391,7 @@ public class TermVectorsService {
         MapperService mapperService = indexShard.mapperService();
         DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate();
         ParsedDocument parsedDocument = docMapper.getDocumentMapper()
-            .parse(new SourceToParse(index, MapperService.SINGLE_MAPPING_NAME, "_id_for_tv_api", doc, xContentType, routing));
+            .parse(new SourceToParse(index, "_id_for_tv_api", doc, xContentType, routing));
         if (docMapper.getMapping() != null) {
             parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping());
         }

@ -35,7 +35,7 @@ package org.opensearch.index.translog;
|
|||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.store.AlreadyClosedException;
|
||||
import org.opensearch.LegacyESVersion;
|
||||
import org.opensearch.Version;
|
||||
import org.opensearch.common.Nullable;
|
||||
import org.opensearch.common.Strings;
|
||||
import org.opensearch.common.UUIDs;
|
||||
|
@ -54,6 +54,7 @@ import org.opensearch.index.IndexSettings;
|
|||
import org.opensearch.index.VersionType;
|
||||
import org.opensearch.index.engine.Engine;
|
||||
import org.opensearch.index.engine.MissingHistoryOperationsException;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.index.seqno.SequenceNumbers;
|
||||
import org.opensearch.index.shard.AbstractIndexShardComponent;
|
||||
import org.opensearch.index.shard.IndexShardComponent;
|
||||
|
@ -1192,11 +1193,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
public static final int FORMAT_6_0 = 8; // since 6.0.0
|
||||
public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0
|
||||
public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1;
|
||||
public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE;
|
||||
|
||||
public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1;
|
||||
public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE;
|
||||
private final String id;
|
||||
private final long autoGeneratedIdTimestamp;
|
||||
private final String type;
|
||||
private final long seqNo;
|
||||
private final long primaryTerm;
|
||||
private final long version;
|
||||
|
@ -1207,7 +1207,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
final int format = in.readVInt(); // SERIALIZATION_FORMAT
|
||||
assert format >= FORMAT_6_0 : "format was: " + format;
|
||||
id = in.readString();
|
||||
type = in.readString();
|
||||
if (format < FORMAT_NO_DOC_TYPE) {
|
||||
in.readString();
|
||||
// can't assert that this is _doc because pre 2.0 indexes can have any name for a type
|
||||
}
|
||||
source = in.readBytesReference();
|
||||
routing = in.readOptionalString();
|
||||
if (format < FORMAT_NO_PARENT) {
|
||||
|
@ -1224,7 +1227,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
|
||||
public Index(Engine.Index index, Engine.IndexResult indexResult) {
|
||||
this.id = index.id();
|
||||
this.type = index.type();
|
||||
this.source = index.source();
|
||||
this.routing = index.routing();
|
||||
this.seqNo = indexResult.getSeqNo();
|
||||
|
@ -1233,21 +1235,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp();
|
||||
}
|
||||
|
||||
public Index(String type, String id, long seqNo, long primaryTerm, byte[] source) {
|
||||
this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1);
|
||||
public Index(String id, long seqNo, long primaryTerm, byte[] source) {
|
||||
this(id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1);
|
||||
}
|
||||
|
||||
public Index(
|
||||
String type,
|
||||
String id,
|
||||
long seqNo,
|
||||
long primaryTerm,
|
||||
long version,
|
||||
byte[] source,
|
||||
String routing,
|
||||
long autoGeneratedIdTimestamp
|
||||
) {
|
||||
this.type = type;
|
||||
public Index(String id, long seqNo, long primaryTerm, long version, byte[] source, String routing, long autoGeneratedIdTimestamp) {
|
||||
this.id = id;
|
||||
this.source = new BytesArray(source);
|
||||
this.seqNo = seqNo;
|
||||
|
@ -1264,12 +1256,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
|
||||
@Override
|
||||
public long estimateSize() {
|
||||
return (2 * id.length()) + (2 * type.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4
|
||||
* Long.BYTES); // timestamp, seq_no, primary_term, and version
|
||||
}
|
||||
|
||||
public String type() {
|
||||
return this.type;
|
||||
return (2 * id.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4 * Long.BYTES); // timestamp,
|
||||
// seq_no,
|
||||
// primary_term,
|
||||
// and version
|
||||
}
|
||||
|
||||
public String id() {
|
||||
|
@ -1304,10 +1294,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
}
|
||||
|
||||
private void write(final StreamOutput out) throws IOException {
|
||||
final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0;
|
||||
final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE;
|
||||
out.writeVInt(format);
|
||||
out.writeString(id);
|
||||
out.writeString(type);
|
||||
if (format < FORMAT_NO_DOC_TYPE) {
|
||||
out.writeString(MapperService.SINGLE_MAPPING_NAME);
|
||||
}
|
||||
out.writeBytesReference(source);
|
||||
out.writeOptionalString(routing);
if (format < FORMAT_NO_PARENT) {

@@ -1337,7 +1329,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|| seqNo != index.seqNo
|| primaryTerm != index.primaryTerm
|| id.equals(index.id) == false
|| type.equals(index.type) == false
|| autoGeneratedIdTimestamp != index.autoGeneratedIdTimestamp
|| source.equals(index.source) == false) {
return false;

@@ -1352,7 +1343,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
@Override
public int hashCode() {
int result = id.hashCode();
result = 31 * result + type.hashCode();
result = 31 * result + Long.hashCode(seqNo);
result = 31 * result + Long.hashCode(primaryTerm);
result = 31 * result + Long.hashCode(version);

@@ -1368,9 +1358,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
+ "id='"
+ id
+ '\''
+ ", type='"
+ type
+ '\''
+ ", seqNo="
+ seqNo
+ ", primaryTerm="

@@ -1393,9 +1380,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
private static final int FORMAT_6_0 = 4; // 6.0 - *
public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0
public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1;
public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE;
public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1;
public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE;

private final String type, id;
private final String id;
private final Term uid;
private final long seqNo;
private final long primaryTerm;

@@ -1404,7 +1392,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
private Delete(final StreamInput in) throws IOException {
final int format = in.readVInt();// SERIALIZATION_FORMAT
assert format >= FORMAT_6_0 : "format was: " + format;
type = in.readString();
if (format < FORMAT_NO_DOC_TYPE) {
in.readString();
// Can't assert that this is _doc because pre 2.0 indexes can have any name for a type
}
id = in.readString();
uid = new Term(in.readString(), in.readBytesRef());
this.version = in.readLong();

@@ -1416,16 +1407,15 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
}

public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) {
this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion());
this(delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion());
}

/** utility for testing */
public Delete(String type, String id, long seqNo, long primaryTerm, Term uid) {
this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY);
public Delete(String id, long seqNo, long primaryTerm, Term uid) {
this(id, uid, seqNo, primaryTerm, Versions.MATCH_ANY);
}

public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version) {
this.type = Objects.requireNonNull(type);
public Delete(String id, Term uid, long seqNo, long primaryTerm, long version) {
this.id = Objects.requireNonNull(id);
this.uid = uid;
this.seqNo = seqNo;

@@ -1440,12 +1430,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC

@Override
public long estimateSize() {
return (id.length() * 2) + (type.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (type.length() * 2)
+ (3 * Long.BYTES); // seq_no, primary_term, and version;
}

public String type() {
return type;
return (id.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (3 * Long.BYTES); // seq_no, primary_term,
// and version;
}

public String id() {

@@ -1476,9 +1462,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
}

private void write(final StreamOutput out) throws IOException {
final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0;
final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE;
out.writeVInt(format);
out.writeString(type);
if (format < FORMAT_NO_DOC_TYPE) {
out.writeString(MapperService.SINGLE_MAPPING_NAME);
}
out.writeString(id);
out.writeString(uid.field());
out.writeBytesRef(uid.bytes());

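Illustration only, not part of this commit: with the type argument removed, a translog delete operation is keyed purely by id and uid term. The sketch below assumes the new Delete(String id, Term uid, long seqNo, long primaryTerm, long version) constructor shown above; docId and the literal seqNo/primaryTerm values are made up for the example. On the wire, readers expecting an older format still receive a synthetic type because write() emits MapperService.SINGLE_MAPPING_NAME when the format is older than FORMAT_NO_DOC_TYPE.

    // sketch: create a type-less delete operation for one document
    String docId = "1";
    Term uid = new Term("_id", Uid.encodeId(docId)); // _id term, as used elsewhere in this commit
    Translog.Delete delete = new Translog.Delete(docId, uid, 0L /* seqNo */, 1L /* primaryTerm */, Versions.MATCH_ANY);
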
@@ -283,7 +283,6 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
final Translog.Index o1 = (Translog.Index) prvOp;
final Translog.Index o2 = (Translog.Index) newOp;
sameOp = Objects.equals(o1.id(), o2.id())
&& Objects.equals(o1.type(), o2.type())
&& Objects.equals(o1.source(), o2.source())
&& Objects.equals(o1.routing(), o2.routing())
&& o1.primaryTerm() == o2.primaryTerm()

@@ -293,7 +292,6 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
final Translog.Delete o1 = (Translog.Delete) newOp;
final Translog.Delete o2 = (Translog.Delete) prvOp;
sameOp = Objects.equals(o1.id(), o2.id())
&& Objects.equals(o1.type(), o2.type())
&& o1.primaryTerm() == o2.primaryTerm()
&& o1.seqNo() == o2.seqNo()
&& o1.version() == o2.version();

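Illustration only: after this change the duplicate-operation check in TranslogWriter compares delete operations without the type field. A minimal sketch of the reduced comparison, assuming o1 and o2 are Translog.Delete instances as in the hunk above:

    // sketch: two delete ops are "the same" when id, primaryTerm, seqNo and version all match
    boolean sameOp = Objects.equals(o1.id(), o2.id())
        && o1.primaryTerm() == o2.primaryTerm()
        && o1.seqNo() == o2.seqNo()
        && o1.version() == o2.version();
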
@@ -109,7 +109,6 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
private IndexMetadata indexMetadata() throws IOException {
return IndexMetadata.builder("index")
.putMapping(
"_doc",
"{\"properties\":{\"foo\":{\"type\":\"text\",\"fields\":" + "{\"keyword\":{\"type\":\"keyword\",\"ignore_above\":256}}}}}"
)
.settings(idxSettings)

@@ -711,7 +710,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
final long resultSeqNo = 13;
Engine.DeleteResult deleteResult = new FakeDeleteResult(1, 1, resultSeqNo, found, resultLocation);
IndexShard shard = mock(IndexShard.class);
when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult);
when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult);
when(shard.indexSettings()).thenReturn(indexSettings);
when(shard.shardId()).thenReturn(shardId);

@@ -127,7 +127,6 @@ public class TransportMultiGetActionTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
)
.putMapping(
"_doc",
XContentHelper.convertToJson(
BytesReference.bytes(
XContentFactory.jsonBuilder()

@@ -153,7 +152,6 @@ public class TransportMultiGetActionTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
)
.putMapping(
"_doc",
XContentHelper.convertToJson(
BytesReference.bytes(
XContentFactory.jsonBuilder()

@@ -48,7 +48,7 @@ public class ResyncReplicationRequestTests extends OpenSearchTestCase {

public void testSerialization() throws IOException {
final byte[] bytes = "{}".getBytes(Charset.forName("UTF-8"));
final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1);
final Translog.Index index = new Translog.Index("id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1);
final ShardId shardId = new ShardId(new Index("index", "uuid"), 0);
final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, 100, new Translog.Operation[] { index });

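Illustration only: the updated test builds an index operation without a type. The parameter meanings in the sketch below are an assumption read off the call above (id, seqNo, primaryTerm, version, source bytes, routing, autoGeneratedIdTimestamp); the values themselves are placeholders.

    // sketch: a type-less translog index operation
    byte[] source = "{}".getBytes(StandardCharsets.UTF_8);
    Translog.Index op = new Translog.Index("id", 0, 1L, 1L, source, null, -1);
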
@@ -206,7 +206,7 @@ public class TransportResyncReplicationActionTests extends OpenSearchTestCase {
shardId,
42L,
100,
new Translog.Operation[] { new Translog.Index("type", "id", 0, primaryTerm, 0L, bytes, null, -1) }
new Translog.Operation[] { new Translog.Index("id", 0, primaryTerm, 0L, bytes, null, -1) }
);

final PlainActionFuture<ResyncReplicationResponse> listener = new PlainActionFuture<>();

@@ -170,7 +170,7 @@ public class TransportReplicationAllPermitsAcquisitionTests extends IndexShardTe
IndexMetadata indexMetadata = IndexMetadata.builder(shardId.getIndexName())
.settings(indexSettings)
.primaryTerm(shardId.id(), primary.getOperationPrimaryTerm())
.putMapping("_doc", "{ \"properties\": { \"value\": { \"type\": \"short\"}}}")
.putMapping("{ \"properties\": { \"value\": { \"type\": \"short\"}}}")
.build();
state.metadata(Metadata.builder().put(indexMetadata, false).generateClusterUuidIfNeeded());

@@ -128,7 +128,6 @@ public class TransportMultiTermVectorsActionTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
)
.putMapping(
"_doc",
XContentHelper.convertToJson(
BytesReference.bytes(
XContentFactory.jsonBuilder()

@@ -154,7 +153,6 @@ public class TransportMultiTermVectorsActionTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
)
.putMapping(
"_doc",
XContentHelper.convertToJson(
BytesReference.bytes(
XContentFactory.jsonBuilder()

@@ -240,7 +240,7 @@ public class MetadataCreateDataStreamServiceTests extends OpenSearchTestCase {
.put(request.settings())
.build()
)
.putMapping("_doc", generateMapping("@timestamp"))
.putMapping(generateMapping("@timestamp"))
.numberOfShards(1)
.numberOfReplicas(1)
.build(),

@@ -644,7 +644,7 @@ public class MetadataTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
)
.putMapping("_doc", FIND_MAPPINGS_TEST_ITEM)
.putMapping(FIND_MAPPINGS_TEST_ITEM)
)
.put(
IndexMetadata.builder("index2")

@@ -654,7 +654,7 @@ public class MetadataTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
)
.putMapping("_doc", FIND_MAPPINGS_TEST_ITEM)
.putMapping(FIND_MAPPINGS_TEST_ITEM)
)
.build();

@@ -739,7 +739,7 @@ public class MetadataTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
)
.putMapping("_doc", mapping)
.putMapping(mapping)
)
.put(
IndexMetadata.builder("index2")

@@ -749,7 +749,7 @@ public class MetadataTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
)
.putMapping("_doc", mapping)
.putMapping(mapping)
)
.put(
IndexMetadata.builder("index3")

@@ -759,7 +759,7 @@ public class MetadataTests extends OpenSearchTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
)
.putMapping("_doc", mapping)
.putMapping(mapping)
)
.build();

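Illustration only: IndexMetadata.Builder now takes the mapping source without a leading "_doc" type name, as the hunks above show. A minimal self-contained sketch under that assumption; the index name, settings and mapping JSON are placeholders:

    // sketch: attaching a mapping without a type name
    IndexMetadata indexMetadata = IndexMetadata.builder("index1")
        .settings(
            Settings.builder()
                .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
        )
        .putMapping("{\"properties\":{\"field\":{\"type\":\"keyword\"}}}")
        .build();
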
@@ -112,8 +112,7 @@ public class ToAndFromJsonMetadataTests extends OpenSearchTestCase {
.creationDate(2L)
.numberOfShards(1)
.numberOfReplicas(2)
.putMapping("mapping1", MAPPING_SOURCE1)
.putMapping("mapping2", MAPPING_SOURCE2)
.putMapping(MAPPING_SOURCE1)
.putAlias(newAliasMetadataBuilder("alias1").filter(ALIAS_FILTER1))
.putAlias(newAliasMetadataBuilder("alias3").writeIndex(randomBoolean() ? null : randomBoolean()))
.putAlias(newAliasMetadataBuilder("alias4").filter(ALIAS_FILTER2))

@@ -223,7 +223,6 @@ public class IndexingSlowLogTests extends OpenSearchTestCase {
new NumericDocValuesField("version", 1),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
"id",
"test",
"routingValue",
null,
source,

@@ -237,7 +236,6 @@ public class IndexingSlowLogTests extends OpenSearchTestCase {
assertThat(p.getValueFor("message"), equalTo("[foo/123]"));
assertThat(p.getValueFor("took"), equalTo("10nanos"));
assertThat(p.getValueFor("took_millis"), equalTo("0"));
assertThat(p.getValueFor("doc_type"), equalTo("test"));
assertThat(p.getValueFor("id"), equalTo("id"));
assertThat(p.getValueFor("routing"), equalTo("routingValue"));
assertThat(p.getValueFor("source"), is(emptyOrNullString()));

@@ -253,7 +251,6 @@ public class IndexingSlowLogTests extends OpenSearchTestCase {
new NumericDocValuesField("version", 1),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
"id",
"test",
null,
null,
source,

@@ -284,7 +281,6 @@ public class IndexingSlowLogTests extends OpenSearchTestCase {
new NumericDocValuesField("version", 1),
SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
"id",
"test",
null,
null,
source,

@@ -124,16 +124,11 @@ import org.opensearch.index.IndexSettings;
import org.opensearch.index.VersionType;
import org.opensearch.index.codec.CodecService;
import org.opensearch.index.fieldvisitor.FieldsVisitor;
import org.opensearch.index.mapper.ContentPath;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.Mapper.BuilderContext;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.Mapping;
import org.opensearch.index.mapper.MetadataFieldMapper;
import org.opensearch.index.mapper.ParseContext;
import org.opensearch.index.mapper.ParseContext.Document;
import org.opensearch.index.mapper.ParsedDocument;
import org.opensearch.index.mapper.RootObjectMapper;
import org.opensearch.index.mapper.SeqNoFieldMapper;
import org.opensearch.index.mapper.SourceFieldMapper;
import org.opensearch.index.mapper.Uid;

@@ -195,7 +190,6 @@ import java.util.function.ToLongBiFunction;
import java.util.stream.Collectors;
import java.util.stream.LongStream;

import static java.util.Collections.emptyMap;
import static java.util.Collections.shuffle;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.sameInstance;

@@ -304,7 +298,7 @@ public class InternalEngineTests extends EngineTestCase {
if (operation.origin() == PRIMARY) {
assertFalse("safe access should NOT be required last indexing round was only append only", engine.isSafeAccessRequired());
}
engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get()));
engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get()));
assertTrue("safe access should be required", engine.isSafeAccessRequired());
engine.refresh("test");
assertTrue("safe access should be required", engine.isSafeAccessRequired());

@@ -478,7 +472,7 @@ public class InternalEngineTests extends EngineTestCase {
liveDocsFirstSegment.remove(idToUpdate);
ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null);
if (randomBoolean()) {
engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc), primaryTerm.get()));
engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get()));
deletes++;
} else {
engine.index(indexForDoc(doc));

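Illustration only: engine-level deletes drop the type argument as well. A short sketch assuming the shorter Engine.Delete(id, uid, primaryTerm) constructor used in the updated test hunks above, with the uid term built explicitly rather than via the newUid test helper:

    // sketch: delete a document by id only
    Engine.Delete delete = new Engine.Delete("1", new Term("_id", Uid.encodeId("1")), primaryTerm.get());
    engine.delete(delete);
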
@ -609,7 +603,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
initialEngine.index(operation);
|
||||
} else {
|
||||
final Engine.Delete operation = new Engine.Delete(
|
||||
"test",
|
||||
"1",
|
||||
newUid(doc),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -879,7 +872,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
searchResult.close();
|
||||
|
||||
// now delete
|
||||
engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get()));
|
||||
|
||||
// its not deleted yet
|
||||
searchResult = engine.acquireSearcher("test");
|
||||
|
@ -1026,7 +1019,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
// don't release the search result yet...
|
||||
|
||||
// delete, refresh and do a new search, it should not be there
|
||||
engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get()));
|
||||
engine.refresh("test");
|
||||
Engine.Searcher updateSearchResult = engine.acquireSearcher("test");
|
||||
MatcherAssert.assertThat(updateSearchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
|
||||
|
@ -1471,7 +1464,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
|
||||
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
|
||||
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
|
||||
final MapperService mapperService = createMapperService("test");
|
||||
final MapperService mapperService = createMapperService();
|
||||
final Set<String> liveDocs = new HashSet<>();
|
||||
try (
|
||||
Store store = createStore();
|
||||
|
@ -1488,7 +1481,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
for (int i = 0; i < numDocs; i++) {
|
||||
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
|
||||
if (randomBoolean()) {
|
||||
engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
liveDocs.remove(doc.id());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
|
@ -1550,7 +1543,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
|
||||
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
|
||||
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
|
||||
final MapperService mapperService = createMapperService("test");
|
||||
final MapperService mapperService = createMapperService();
|
||||
final boolean omitSourceAllTheTime = randomBoolean();
|
||||
final Set<String> liveDocs = new HashSet<>();
|
||||
final Set<String> liveDocsWithSource = new HashSet<>();
|
||||
|
@ -1574,7 +1567,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
|
||||
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null, useRecoverySource);
|
||||
if (randomBoolean()) {
|
||||
engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
liveDocs.remove(doc.id());
|
||||
liveDocsWithSource.remove(doc.id());
|
||||
}
|
||||
|
@ -1826,7 +1819,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
} else {
|
||||
Engine.Delete delete = (Engine.Delete) operation;
|
||||
return new Engine.Delete(
|
||||
delete.type(),
|
||||
delete.id(),
|
||||
delete.uid(),
|
||||
newSeqNo,
|
||||
|
@ -1930,7 +1922,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
0
|
||||
);
|
||||
BiFunction<Long, Engine.Delete, Engine.Delete> delWithVersion = (version, delete) -> new Engine.Delete(
|
||||
delete.type(),
|
||||
delete.id(),
|
||||
delete.uid(),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -1957,7 +1948,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
term
|
||||
);
|
||||
TriFunction<Long, Long, Engine.Delete, Engine.Delete> delWithSeq = (seqNo, term, delete) -> new Engine.Delete(
|
||||
delete.type(),
|
||||
delete.id(),
|
||||
delete.uid(),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -1984,7 +1974,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
index.getIfPrimaryTerm()
|
||||
);
|
||||
Function<Engine.Delete, Engine.Delete> deleteWithCurrentTerm = delete -> new Engine.Delete(
|
||||
delete.type(),
|
||||
delete.id(),
|
||||
delete.uid(),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -2371,7 +2360,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
indexResult = engine.index(index);
|
||||
assertFalse(indexResult.isCreated());
|
||||
|
||||
engine.delete(new Engine.Delete("doc", "1", newUid(doc), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get()));
|
||||
|
||||
index = indexForDoc(doc);
|
||||
indexResult = engine.index(index);
|
||||
|
@ -2503,7 +2492,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
// we have some docs indexed, so delete one of them
|
||||
id = randomFrom(indexedIds);
|
||||
final Engine.Delete delete = new Engine.Delete(
|
||||
"test",
|
||||
id,
|
||||
newUid(id),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -2817,7 +2805,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
// Delete document we just added:
|
||||
engine.delete(
|
||||
new Engine.Delete(
|
||||
"test",
|
||||
"1",
|
||||
newUid(doc),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -2845,7 +2832,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
// Delete non-existent document
|
||||
engine.delete(
|
||||
new Engine.Delete(
|
||||
"test",
|
||||
"2",
|
||||
newUid("2"),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -3234,15 +3220,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
private Mapping dynamicUpdate() {
|
||||
BuilderContext context = new BuilderContext(
|
||||
Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(),
|
||||
new ContentPath()
|
||||
);
|
||||
final RootObjectMapper root = new RootObjectMapper.Builder("some_type").build(context);
|
||||
return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap());
|
||||
}
|
||||
|
||||
private Path[] filterExtraFSFiles(Path[] files) {
|
||||
List<Path> paths = new ArrayList<>();
|
||||
for (Path p : files) {
|
||||
|
@ -3278,7 +3255,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
}
|
||||
assertVisibleCount(engine, numDocs);
|
||||
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
|
||||
translogHandler.mappingUpdate = dynamicUpdate();
|
||||
|
||||
engine.close();
|
||||
// we need to reuse the engine config unless the parser.mappingModified won't work
|
||||
|
@ -3288,12 +3264,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
|
||||
assertVisibleCount(engine, numDocs, false);
|
||||
assertEquals(numDocs, translogHandler.appliedOperations());
|
||||
if (translogHandler.mappingUpdate != null) {
|
||||
assertEquals(1, translogHandler.getRecoveredTypes().size());
|
||||
assertTrue(translogHandler.getRecoveredTypes().containsKey("test"));
|
||||
} else {
|
||||
assertEquals(0, translogHandler.getRecoveredTypes().size());
|
||||
}
|
||||
|
||||
engine.close();
|
||||
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
|
||||
|
@ -3358,7 +3328,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
|
||||
}
|
||||
assertEquals(flush ? 1 : 2, translogHandler.appliedOperations());
|
||||
engine.delete(new Engine.Delete("test", Integer.toString(randomId), newUid(doc), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get()));
|
||||
if (randomBoolean()) {
|
||||
engine.close();
|
||||
engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
|
||||
|
@ -3405,7 +3375,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
primaryTerm::get,
|
||||
seqNo -> {}
|
||||
);
|
||||
translog.add(new Translog.Index("test", "SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8"))));
|
||||
translog.add(new Translog.Index("SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8"))));
|
||||
assertEquals(generation.translogFileGeneration, translog.currentFileGeneration());
|
||||
translog.close();
|
||||
|
||||
|
@ -3689,10 +3659,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
}
|
||||
// now the engine is closed check we respond correctly
|
||||
expectThrows(AlreadyClosedException.class, () -> engine.index(indexForDoc(doc1)));
|
||||
expectThrows(
|
||||
AlreadyClosedException.class,
|
||||
() -> engine.delete(new Engine.Delete("test", "", newUid(doc1), primaryTerm.get()))
|
||||
);
|
||||
expectThrows(AlreadyClosedException.class, () -> engine.delete(new Engine.Delete("", newUid(doc1), primaryTerm.get())));
|
||||
expectThrows(
|
||||
AlreadyClosedException.class,
|
||||
() -> engine.noOp(
|
||||
|
@ -3714,8 +3681,8 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
try (Store store = createStore()) {
|
||||
EngineConfig.TombstoneDocSupplier tombstoneDocSupplier = new EngineConfig.TombstoneDocSupplier() {
|
||||
@Override
|
||||
public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
|
||||
ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(type, id);
|
||||
public ParsedDocument newDeleteTombstoneDoc(String id) {
|
||||
ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(id);
|
||||
parsedDocument.rootDoc().add(new StoredField("foo", "bar") {
|
||||
// this is a hack to add a failure during store document which triggers a tragic event
|
||||
// and in turn fails the engine
|
||||
|
@ -3736,10 +3703,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
try (InternalEngine engine = createEngine(null, null, null, config)) {
|
||||
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
|
||||
engine.index(indexForDoc(doc));
|
||||
expectThrows(
|
||||
IllegalStateException.class,
|
||||
() -> engine.delete(new Engine.Delete("test", "1", newUid("1"), primaryTerm.get()))
|
||||
);
|
||||
expectThrows(IllegalStateException.class, () -> engine.delete(new Engine.Delete("1", newUid("1"), primaryTerm.get())));
|
||||
assertTrue(engine.isClosed.get());
|
||||
assertSame(tragicException, engine.failedEngine.get());
|
||||
}
|
||||
|
@ -3839,7 +3803,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
Engine.Index operation = appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
|
||||
Engine.Index retry = appendOnlyReplica(doc, true, 1, randomIntBetween(0, 5));
|
||||
Engine.Delete delete = new Engine.Delete(
|
||||
operation.type(),
|
||||
operation.id(),
|
||||
operation.uid(),
|
||||
Math.max(retry.seqNo(), operation.seqNo()) + 1,
|
||||
|
@ -4000,7 +3963,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
assertEquals(1, topDocs.totalHits.value);
|
||||
}
|
||||
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
|
||||
List<Translog.Operation> ops = readAllOperationsInLucene(engine, createMapperService("test"));
|
||||
List<Translog.Operation> ops = readAllOperationsInLucene(engine, createMapperService());
|
||||
assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L));
|
||||
}
|
||||
}
|
||||
|
@ -4597,7 +4560,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
} else {
|
||||
operations.add(
|
||||
new Engine.Delete(
|
||||
doc.type(),
|
||||
doc.id(),
|
||||
EngineTestCase.newUid(doc),
|
||||
seqNo,
|
||||
|
@ -4806,7 +4768,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
operations.add(index);
|
||||
} else {
|
||||
final Engine.Delete delete = new Engine.Delete(
|
||||
"test",
|
||||
"1",
|
||||
uid,
|
||||
sequenceNumberSupplier.getAsLong(),
|
||||
|
@ -4868,7 +4829,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
*/
|
||||
public void testVersionConflictIgnoreDeletedDoc() throws IOException {
|
||||
ParsedDocument doc = testParsedDocument("1", null, testDocument(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
|
||||
engine.delete(new Engine.Delete("test", "1", newUid("1"), 1));
|
||||
engine.delete(new Engine.Delete("1", newUid("1"), 1));
|
||||
for (long seqNo : new long[] { 0, 1, randomNonNegativeLong() }) {
|
||||
assertDeletedVersionConflict(
|
||||
engine.index(
|
||||
|
@ -4893,7 +4854,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
assertDeletedVersionConflict(
|
||||
engine.delete(
|
||||
new Engine.Delete(
|
||||
"test",
|
||||
"1",
|
||||
newUid("1"),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -4973,7 +4933,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get()));
|
||||
assertThat(noOp.reason(), equalTo(reason));
|
||||
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
|
||||
MapperService mapperService = createMapperService("test");
|
||||
MapperService mapperService = createMapperService();
|
||||
List<Translog.Operation> operationsFromLucene = readAllOperationsInLucene(noOpEngine, mapperService);
|
||||
assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // fills n gap and 2 manual noop.
|
||||
for (int i = 0; i < operationsFromLucene.size(); i++) {
|
||||
|
@ -5050,7 +5010,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
}
|
||||
}
|
||||
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
|
||||
List<Translog.Operation> operations = readAllOperationsInLucene(engine, createMapperService("test"));
|
||||
List<Translog.Operation> operations = readAllOperationsInLucene(engine, createMapperService());
|
||||
assertThat(operations, hasSize(numOps));
|
||||
}
|
||||
}
|
||||
|
@ -5207,7 +5167,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
equalTo(0)
|
||||
);
|
||||
}
|
||||
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test"));
|
||||
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5409,7 +5369,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
) {
|
||||
final String id = "id";
|
||||
final Field uidField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE);
|
||||
final String type = "type";
|
||||
final Field versionField = new NumericDocValuesField("_version", 0);
|
||||
final SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
|
||||
final ParseContext.Document document = new ParseContext.Document();
|
||||
|
@ -5423,7 +5382,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
versionField,
|
||||
seqID,
|
||||
id,
|
||||
type,
|
||||
"routing",
|
||||
Collections.singletonList(document),
|
||||
source,
|
||||
|
@ -5450,7 +5408,6 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
assertThat(seqNoGenerator.get(), equalTo(seqNo + 1));
|
||||
|
||||
final Engine.Delete delete = new Engine.Delete(
|
||||
type,
|
||||
id,
|
||||
new Term("_id", parsedDocument.id()),
|
||||
UNASSIGNED_SEQ_NO,
|
||||
|
@ -5577,7 +5534,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
Engine.Index operation = appendOnlyPrimary(doc, false, 1);
|
||||
engine.index(operation);
|
||||
if (rarely()) {
|
||||
engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get()));
|
||||
numDeletes.incrementAndGet();
|
||||
} else {
|
||||
doc = testParsedDocument(
|
||||
|
@ -5915,7 +5872,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
);
|
||||
// first index an append only document and then delete it. such that we have it in the tombstones
|
||||
engine.index(doc);
|
||||
engine.delete(new Engine.Delete(doc.type(), doc.id(), doc.uid(), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete(doc.id(), doc.uid(), primaryTerm.get()));
|
||||
|
||||
// now index more append only docs and refresh so we re-enabel the optimization for unsafe version map
|
||||
ParsedDocument document1 = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null);
|
||||
|
@ -6163,7 +6120,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
|
||||
}
|
||||
}
|
||||
MapperService mapperService = createMapperService("test");
|
||||
MapperService mapperService = createMapperService();
|
||||
List<Translog.Operation> luceneOps = readAllOperationsBasedOnSource(engine, mapperService);
|
||||
assertThat(luceneOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray()));
|
||||
}
|
||||
|
@ -6230,7 +6187,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
|
||||
}
|
||||
}
|
||||
MapperService mapperService = createMapperService("test");
|
||||
MapperService mapperService = createMapperService();
|
||||
List<Translog.Operation> actualOps = readAllOperationsInLucene(engine, mapperService);
|
||||
assertThat(actualOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray()));
|
||||
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
|
||||
|
@ -6320,7 +6277,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
long minRetainSeqNos = engine.getMinRetainedSeqNo();
|
||||
assertThat(minRetainSeqNos, lessThanOrEqualTo(globalCheckpoint.get() + 1));
|
||||
Long[] expectedOps = existingSeqNos.stream().filter(seqno -> seqno >= minRetainSeqNos).toArray(Long[]::new);
|
||||
Set<Long> actualOps = readAllOperationsInLucene(engine, createMapperService("test")).stream()
|
||||
Set<Long> actualOps = readAllOperationsInLucene(engine, createMapperService()).stream()
|
||||
.map(Translog.Operation::seqNo)
|
||||
.collect(Collectors.toSet());
|
||||
assertThat(actualOps, containsInAnyOrder(expectedOps));
|
||||
|
@ -6369,7 +6326,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
}
|
||||
|
||||
public void testLuceneSnapshotRefreshesOnlyOnce() throws Exception {
|
||||
final MapperService mapperService = createMapperService("test");
|
||||
final MapperService mapperService = createMapperService();
|
||||
final long maxSeqNo = randomLongBetween(10, 50);
|
||||
final AtomicLong refreshCounter = new AtomicLong();
|
||||
try (
|
||||
|
@ -6484,7 +6441,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
);
|
||||
}
|
||||
} else {
|
||||
Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
liveDocIds.remove(doc.id());
|
||||
assertThat(
|
||||
"delete operations on primary must advance max_seq_no_of_updates",
|
||||
|
@ -6712,7 +6669,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
index(engine, i);
|
||||
}
|
||||
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
|
||||
engine.delete(new Engine.Delete("_doc", "0", newUid("0"), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete("0", newUid("0"), primaryTerm.get()));
|
||||
engine.refresh("test");
|
||||
// now we have 2 segments since we now added a tombstone plus the old segment with the delete
|
||||
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
|
||||
|
@ -6913,7 +6870,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
return iw.get();
|
||||
}, null, null, config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
|
||||
engine.index(new Engine.Index(newUid("0"), primaryTerm.get(), InternalEngineTests.createParsedDoc("0", null)));
|
||||
final Engine.Delete op = new Engine.Delete("_doc", "0", newUid("0"), primaryTerm.get());
|
||||
final Engine.Delete op = new Engine.Delete("0", newUid("0"), primaryTerm.get());
|
||||
consumer.accept(engine, op);
|
||||
iw.get().setThrowFailure(() -> new IllegalArgumentException("fatal"));
|
||||
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op));
|
||||
|
@ -7185,7 +7142,7 @@ public class InternalEngineTests extends EngineTestCase {
|
|||
operations.add(indexForDoc(createParsedDoc(id, null)));
|
||||
} else {
|
||||
id = "not_found";
|
||||
operations.add(new Engine.Delete("_doc", id, newUid(id), primaryTerm.get()));
|
||||
operations.add(new Engine.Delete(id, newUid(id), primaryTerm.get()));
|
||||
}
|
||||
}
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
|
|
|
@ -59,7 +59,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase {
|
|||
|
||||
@Before
|
||||
public void createMapper() throws Exception {
|
||||
mapperService = createMapperService("test");
|
||||
mapperService = createMapperService();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -92,7 +92,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase {
|
|||
if (randomBoolean()) {
|
||||
engine.index(indexForDoc(doc));
|
||||
} else {
|
||||
engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
|
||||
}
|
||||
if (rarely()) {
|
||||
if (randomBoolean()) {
|
||||
|
@ -264,7 +264,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase {
|
|||
if (randomBoolean()) {
|
||||
op = new Engine.Index(newUid(doc), primaryTerm.get(), doc);
|
||||
} else {
|
||||
op = new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get());
|
||||
op = new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get());
|
||||
}
|
||||
} else {
|
||||
if (randomBoolean()) {
|
||||
|
|
|
@ -152,7 +152,7 @@ public class NoOpEngineTests extends EngineTestCase {
|
|||
for (int i = 0; i < numDocs; i++) {
|
||||
if (randomBoolean()) {
|
||||
String delId = Integer.toString(i);
|
||||
Engine.DeleteResult result = engine.delete(new Engine.Delete("_doc", delId, newUid(delId), primaryTerm.get()));
|
||||
Engine.DeleteResult result = engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get()));
|
||||
assertTrue(result.isFound());
|
||||
engine.syncTranslog(); // advance persisted local checkpoint
|
||||
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
|
||||
|
|
|
@ -112,7 +112,7 @@ public class ReadOnlyEngineTests extends EngineTestCase {
|
|||
for (int i = 0; i < numDocs; i++) {
|
||||
if (randomBoolean()) {
|
||||
String delId = Integer.toString(i);
|
||||
engine.delete(new Engine.Delete("test", delId, newUid(delId), primaryTerm.get()));
|
||||
engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get()));
|
||||
}
|
||||
if (rarely()) {
|
||||
engine.flush();
|
||||
|
|
|
@ -81,16 +81,16 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
|
|||
doc.endArray();
|
||||
}
|
||||
doc.endObject();
|
||||
ParsedDocument d = mapper.parse(new SourceToParse("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON));
|
||||
ParsedDocument d = mapper.parse(new SourceToParse("test", "1", BytesReference.bytes(doc), XContentType.JSON));
|
||||
writer.addDocument(d.rootDoc());
|
||||
|
||||
BytesRef bytes1 = randomBytes();
|
||||
doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1.bytes, bytes1.offset, bytes1.length).endObject();
|
||||
d = mapper.parse(new SourceToParse("test", "test", "2", BytesReference.bytes(doc), XContentType.JSON));
|
||||
d = mapper.parse(new SourceToParse("test", "2", BytesReference.bytes(doc), XContentType.JSON));
|
||||
writer.addDocument(d.rootDoc());
|
||||
|
||||
doc = XContentFactory.jsonBuilder().startObject().endObject();
|
||||
d = mapper.parse(new SourceToParse("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON));
|
||||
d = mapper.parse(new SourceToParse("test", "3", BytesReference.bytes(doc), XContentType.JSON));
|
||||
writer.addDocument(d.rootDoc());
|
||||
|
||||
// test remove duplicate value
|
||||
|
@ -106,7 +106,7 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
|
|||
doc.endArray();
|
||||
}
|
||||
doc.endObject();
|
||||
d = mapper.parse(new SourceToParse("test", "test", "4", BytesReference.bytes(doc), XContentType.JSON));
|
||||
d = mapper.parse(new SourceToParse("test", "4", BytesReference.bytes(doc), XContentType.JSON));
|
||||
writer.addDocument(d.rootDoc());
|
||||
|
||||
IndexFieldData<?> indexFieldData = getForField("field");
|
||||
|
|
|
@ -76,7 +76,6 @@ public class DataStreamFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = mapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"_doc",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@ -97,7 +96,6 @@ public class DataStreamFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
mapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"_doc",
|
||||
"3",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@ -127,7 +125,6 @@ public class DataStreamFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = mapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"_doc",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder().startObject().field(timestampFieldName, "2020-12-06T11:04:05.000Z").endObject()
|
||||
|
@ -146,7 +143,6 @@ public class DataStreamFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
mapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"_doc",
|
||||
"2",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder().startObject().field("invalid-field-name", "2020-12-06T11:04:05.000Z").endObject()
|
||||
|
@ -165,7 +161,6 @@ public class DataStreamFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
mapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"_doc",
|
||||
"3",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
|
|
@ -1063,8 +1063,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
|
|||
// reparse it
|
||||
DocumentMapper builtDocMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, builtMapping);
|
||||
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json"));
|
||||
Document doc = builtDocMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
Document doc = builtDocMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
assertThat(doc.getBinaryValue(builtDocMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1")));
|
||||
assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").name()), equalTo("fred"));
|
||||
}
|
||||
|
@ -1076,8 +1075,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
|
|||
assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
|
||||
|
||||
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json"));
|
||||
Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1")));
|
||||
assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred"));
|
||||
}
|
||||
|
@ -1086,8 +1084,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
|
|||
String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/simple/test-mapping.json");
|
||||
DocumentMapper docMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, mapping);
|
||||
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1-notype-noid.json"));
|
||||
Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1")));
|
||||
assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred"));
|
||||
}
|
||||
|
@ -1109,7 +1106,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
|
|||
BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
|
||||
MapperParsingException e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
() -> docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON))
|
||||
);
|
||||
assertThat(e.getMessage(), equalTo("failed to parse, document is empty"));
|
||||
}
|
||||
|
|
|
@ -366,7 +366,7 @@ public class DynamicMappingTests extends MapperServiceTestCase {
|
|||
.field("quux", "3.2") // float detected through numeric detection
|
||||
.endObject()
|
||||
);
|
||||
ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "_doc", "id", source, builder.contentType()));
|
||||
ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "id", source, builder.contentType()));
|
||||
Mapping update = parsedDocument.dynamicMappingsUpdate();
|
||||
assertNotNull(update);
|
||||
assertThat(((FieldMapper) update.root().getMapper("foo")).fieldType().typeName(), equalTo("float"));
|
||||
|
|
|
@ -110,7 +110,6 @@ public class FieldNamesFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = defaultMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder().startObject().field("a", "100").startObject("b").field("c", 42).endObject().endObject()
|
||||
|
@ -148,7 +147,6 @@ public class FieldNamesFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -179,7 +177,6 @@ public class FieldNamesFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
|
||||
XContentType.JSON
|
||||
|
|
|
@ -53,7 +53,7 @@ public class GenericStoreDynamicTemplateTests extends OpenSearchSingleNodeTestCa
|
|||
|
||||
byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json");
|
||||
ParsedDocument parsedDoc = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON));
|
||||
.parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON));
|
||||
client().admin()
|
||||
.indices()
|
||||
.preparePutMapping("test")
|
||||
|
|
|
@ -72,7 +72,6 @@ public class IdFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_id", "1").endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -91,7 +90,7 @@ public class IdFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
Settings indexSettings = Settings.EMPTY;
|
||||
MapperService mapperService = createIndex("test", indexSettings).mapperService();
|
||||
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
|
||||
ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
|
||||
ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));
|
||||
IndexableField[] fields = document.rootDoc().getFields(IdFieldMapper.NAME);
|
||||
assertEquals(1, fields.length);
|
||||
assertEquals(IndexOptions.DOCS, fields[0].fieldType().indexOptions());
|
||||
|
|
|
@ -63,7 +63,6 @@ public class IndexFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
|
||||
XContentType.JSON
|
||||
|
|
|
@ -79,7 +79,6 @@ public class IpRangeFieldMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = mapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", entry.getKey()).endObject()),
|
||||
XContentType.JSON
|
||||
|
|
|
@ -56,9 +56,7 @@ public class JavaMultiFieldMergeTests extends OpenSearchSingleNodeTestCase {
|
|||
assertThat(mapperService.fieldType("name.indexed"), nullValue());
|
||||
|
||||
BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject());
|
||||
Document doc = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
IndexableField f = doc.getField("name");
|
||||
assertThat(f, notNullValue());
|
||||
f = doc.getField("name.indexed");
|
||||
|
@ -74,9 +72,7 @@ public class JavaMultiFieldMergeTests extends OpenSearchSingleNodeTestCase {
|
|||
assertThat(mapperService.fieldType("name.not_indexed2"), nullValue());
|
||||
assertThat(mapperService.fieldType("name.not_indexed3"), nullValue());
|
||||
|
||||
doc = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
f = doc.getField("name");
|
||||
assertThat(f, notNullValue());
|
||||
f = doc.getField("name.indexed");
|
||||
|
@ -113,9 +109,7 @@ public class JavaMultiFieldMergeTests extends OpenSearchSingleNodeTestCase {
|
|||
assertThat(mapperService.fieldType("name.indexed"), nullValue());
|
||||
|
||||
BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject());
|
||||
Document doc = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
IndexableField f = doc.getField("name");
|
||||
assertThat(f, notNullValue());
|
||||
f = doc.getField("name.indexed");
|
||||
|
@ -131,9 +125,7 @@ public class JavaMultiFieldMergeTests extends OpenSearchSingleNodeTestCase {
|
|||
assertThat(mapperService.fieldType("name.not_indexed2"), nullValue());
|
||||
assertThat(mapperService.fieldType("name.not_indexed3"), nullValue());
|
||||
|
||||
doc = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
f = doc.getField("name");
|
||||
assertThat(f, notNullValue());
|
||||
f = doc.getField("name.indexed");
|
||||
|
|
|
@ -76,9 +76,7 @@ public class MultiFieldTests extends OpenSearchSingleNodeTestCase {
|
|||
.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
|
||||
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json"));
|
||||
Document doc = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
|
||||
IndexableField f = doc.getField("name");
|
||||
assertThat(f.name(), equalTo("name"));
|
||||
|
@ -157,8 +155,7 @@ public class MultiFieldTests extends OpenSearchSingleNodeTestCase {
|
|||
.parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(builtMapping));
|
||||
|
||||
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json"));
|
||||
Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
|
||||
.rootDoc();
|
||||
Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
|
||||
|
||||
IndexableField f = doc.getField("name");
|
||||
assertThat(f.name(), equalTo("name"));
|
||||
|
|
|
@ -86,7 +86,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").nullField("nested1").endObject()),
|
||||
XContentType.JSON
|
||||
|
@ -98,7 +97,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder().startObject().field("field", "value").startArray("nested").endArray().endObject()
|
||||
|
@ -135,7 +133,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@ -161,7 +158,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@ -230,7 +226,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@ -325,7 +320,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument doc = docMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
XContentFactory.jsonBuilder()
|
||||
|
@@ -421,7 +415,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = docMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -516,7 +509,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = docMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -611,7 +603,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = docMapper.parse(
new SourceToParse(
"test",
MapperService.SINGLE_MAPPING_NAME,
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -681,7 +672,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = docMapper.parse(
new SourceToParse(
"test",
MapperService.SINGLE_MAPPING_NAME,
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -766,7 +756,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = docMapper.parse(
new SourceToParse(
"test",
MapperService.SINGLE_MAPPING_NAME,
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -822,7 +811,6 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = docMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -975,7 +963,7 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
docBuilder.endArray();
}
docBuilder.endObject();
SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1));
assertEquals(
"The number of nested documents has exceeded the allowed limit of ["
@@ -1020,7 +1008,7 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
docBuilder.endArray();
}
docBuilder.endObject();
SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
ParsedDocument doc = docMapper.parse(source1);
assertThat(doc.docs().size(), equalTo(3));
@@ -1037,7 +1025,7 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
docBuilder2.endArray();
}
docBuilder2.endObject();
SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
assertEquals(
"The number of nested documents has exceeded the allowed limit of ["
@@ -1089,7 +1077,7 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
docBuilder.endArray();
}
docBuilder.endObject();
SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
ParsedDocument doc = docMapper.parse(source1);
assertThat(doc.docs().size(), equalTo(3));
@@ -1111,7 +1099,7 @@ public class NestedObjectMapperTests extends OpenSearchSingleNodeTestCase {
}
docBuilder2.endObject();
SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
assertEquals(
"The number of nested documents has exceeded the allowed limit of ["
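All of the mapper-test hunks in this change apply the same mechanical substitution: the mapping-type argument is dropped from the SourceToParse constructor, leaving index name, document id, source bytes, and content type. A minimal before/after sketch of that call; only the constructor shape is taken from the surrounding hunks, and the import locations shown here are assumptions rather than part of this change:

import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.mapper.SourceToParse;

class SourceToParseSketch {
    SourceToParse example() {
        // Before: new SourceToParse("test", "type", "1", new BytesArray("{}"), XContentType.JSON)
        // After: the type argument is removed entirely.
        return new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON);
    }
}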
@@ -65,7 +65,6 @@ public class NullValueObjectMappingTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = defaultMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder().startObject().startObject("obj1").endObject().field("value1", "test1").endObject()
@@ -79,7 +78,6 @@ public class NullValueObjectMappingTests extends OpenSearchSingleNodeTestCase {
doc = defaultMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("obj1").field("value1", "test1").endObject()),
XContentType.JSON
@@ -91,7 +89,6 @@ public class NullValueObjectMappingTests extends OpenSearchSingleNodeTestCase {
doc = defaultMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()

@@ -59,7 +59,6 @@ public class ObjectMapperTests extends OpenSearchSingleNodeTestCase {
defaultMapper.parse(
new SourceToParse(
"test",
"type",
"1",
new BytesArray(
" {\n"

@@ -53,7 +53,7 @@ public class PathMatchDynamicTemplateTests extends OpenSearchSingleNodeTestCase
byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json");
ParsedDocument parsedDoc = mapperService.documentMapper()
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON));
.parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON));
client().admin()
.indices()
.preparePutMapping("test")

@@ -53,7 +53,6 @@ public class RoutingFieldMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = docMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
XContentType.JSON,
@@ -75,7 +74,6 @@ public class RoutingFieldMapperTests extends OpenSearchSingleNodeTestCase {
docMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject()),
XContentType.JSON

@@ -69,7 +69,6 @@ public class SourceFieldMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = documentMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
XContentType.JSON
@@ -82,7 +81,6 @@ public class SourceFieldMapperTests extends OpenSearchSingleNodeTestCase {
doc = documentMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(XContentFactory.smileBuilder().startObject().field("field", "value").endObject()),
XContentType.SMILE
@@ -111,7 +109,6 @@ public class SourceFieldMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = documentMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -156,7 +153,6 @@ public class SourceFieldMapperTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = documentMapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()
@@ -325,8 +321,8 @@ public class SourceFieldMapperTests extends OpenSearchSingleNodeTestCase {
.parse("type", new CompressedXContent(mapping));

try {
documentMapper.parse(new SourceToParse("test", "type", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object
// (invalid JSON)
documentMapper.parse(new SourceToParse("test", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object
// (invalid JSON)
fail("Expected parse exception");
} catch (MapperParsingException e) {
assertNotNull(e.getRootCause());

@@ -111,7 +111,6 @@ public class StoredNumericValuesTests extends OpenSearchSingleNodeTestCase {
ParsedDocument doc = mapper.parse(
new SourceToParse(
"test",
"type",
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder()

@@ -73,7 +73,7 @@ public class TypeFieldMapperTests extends OpenSearchSingleNodeTestCase {
public static void testDocValues(Function<String, IndexService> createIndex) throws IOException {
MapperService mapperService = createIndex.apply("test").mapperService();
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));

Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
@@ -100,7 +100,7 @@ public class TypeFieldMapperTests extends OpenSearchSingleNodeTestCase {
Settings indexSettings = Settings.EMPTY;
MapperService mapperService = createIndex("test", indexSettings).mapperService();
DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));
assertEquals(Collections.<IndexableField>emptyList(), Arrays.asList(document.rootDoc().getFields(TypeFieldMapper.NAME)));
}
}
@@ -57,6 +57,7 @@ import org.opensearch.index.engine.InternalEngine;
import org.opensearch.index.engine.InternalEngineTests;
import org.opensearch.index.engine.SegmentsStats;
import org.opensearch.index.engine.VersionConflictEngineException;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.SeqNoFieldMapper;
import org.opensearch.index.seqno.SeqNoStats;
import org.opensearch.index.seqno.SequenceNumbers;
@@ -75,7 +76,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
@@ -315,10 +315,7 @@ public class IndexLevelReplicationTests extends OpenSearchIndexLevelReplicationT
}

public void testConflictingOpsOnReplica() throws Exception {
Map<String, String> mappings = Collections.singletonMap(
"type",
"{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
);
String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) {
shards.startAll();
List<IndexShard> replicas = shards.getReplicas();
@@ -345,10 +342,7 @@ public class IndexLevelReplicationTests extends OpenSearchIndexLevelReplicationT
}

public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exception {
Map<String, String> mappings = Collections.singletonMap(
"type",
"{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
);
String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) {
shards.startAll();
long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm();
@@ -398,10 +392,7 @@ public class IndexLevelReplicationTests extends OpenSearchIndexLevelReplicationT
}

public void testReplicaOperationWithConcurrentPrimaryPromotion() throws Exception {
Map<String, String> mappings = Collections.singletonMap(
"type",
"{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
);
String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(1, mappings))) {
shards.startAll();
long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm();
@@ -75,10 +75,8 @@ import org.opensearch.indices.recovery.RecoveryTarget;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;
@@ -159,7 +157,7 @@ public class RecoveryDuringReplicationTests extends OpenSearchIndexLevelReplicat
1,
randomNonNegativeLong(),
false,
new SourceToParse("index", "type", "replica", new BytesArray("{}"), XContentType.JSON)
new SourceToParse("index", "replica", new BytesArray("{}"), XContentType.JSON)
);
shards.promoteReplicaToPrimary(promotedReplica).get();
oldPrimary.close("demoted", randomBoolean());
@@ -173,7 +171,7 @@ public class RecoveryDuringReplicationTests extends OpenSearchIndexLevelReplicat
promotedReplica.applyIndexOperationOnPrimary(
Versions.MATCH_ANY,
VersionType.INTERNAL,
new SourceToParse("index", "type", "primary", new BytesArray("{}"), XContentType.JSON),
new SourceToParse("index", "primary", new BytesArray("{}"), XContentType.JSON),
SequenceNumbers.UNASSIGNED_SEQ_NO,
0,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
@@ -358,10 +356,7 @@ public class RecoveryDuringReplicationTests extends OpenSearchIndexLevelReplicat
}

public void testResyncAfterPrimaryPromotion() throws Exception {
Map<String, String> mappings = Collections.singletonMap(
"type",
"{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
);
String mappings = "{ \"_doc\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) {
shards.startAll();
int initialDocs = randomInt(10);
@@ -1861,7 +1861,7 @@ public class IndexShardTests extends IndexShardTestCase {
assertEquals(0, postDelete.get());
assertEquals(0, postDeleteException.get());

deleteDoc(shard, "_doc", "1");
deleteDoc(shard, "1");

assertEquals(2, preIndex.get());
assertEquals(1, postIndexCreate.get());
@@ -1889,7 +1889,7 @@ public class IndexShardTests extends IndexShardTestCase {
assertEquals(1, postDelete.get());
assertEquals(0, postDeleteException.get());
try {
deleteDoc(shard, "_doc", "1");
deleteDoc(shard, "1");
fail();
} catch (AlreadyClosedException e) {
@@ -2184,7 +2184,7 @@ public class IndexShardTests extends IndexShardTestCase {
final IndexShard shard = newStartedShard(false);
long primaryTerm = shard.getOperationPrimaryTerm();
shard.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete
shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "_doc", "id");
shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id");
shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation
shard.applyIndexOperationOnReplica(
0,
@@ -2192,7 +2192,7 @@ public class IndexShardTests extends IndexShardTestCase {
1,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON)
new SourceToParse(shard.shardId().getIndexName(), "id", new BytesArray("{}"), XContentType.JSON)
);
shard.applyIndexOperationOnReplica(
3,
@@ -2200,7 +2200,7 @@ public class IndexShardTests extends IndexShardTestCase {
3,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON)
new SourceToParse(shard.shardId().getIndexName(), "id-3", new BytesArray("{}"), XContentType.JSON)
);
// Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery.
shard.flush(new FlushRequest().force(true).waitIfOngoing(true));
@@ -2210,7 +2210,7 @@ public class IndexShardTests extends IndexShardTestCase {
3,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON)
new SourceToParse(shard.shardId().getIndexName(), "id-2", new BytesArray("{}"), XContentType.JSON)
);
shard.applyIndexOperationOnReplica(
5,
@@ -2218,7 +2218,7 @@ public class IndexShardTests extends IndexShardTestCase {
1,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON)
new SourceToParse(shard.shardId().getIndexName(), "id-5", new BytesArray("{}"), XContentType.JSON)
);
shard.sync(); // advance local checkpoint
@@ -2358,13 +2358,7 @@ public class IndexShardTests extends IndexShardTestCase {
// start a replica shard and index the second doc
final IndexShard otherShard = newStartedShard(false);
updateMappings(otherShard, shard.indexSettings().getIndexMetadata());
SourceToParse sourceToParse = new SourceToParse(
shard.shardId().getIndexName(),
"_doc",
"1",
new BytesArray("{}"),
XContentType.JSON
);
SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), "1", new BytesArray("{}"), XContentType.JSON);
otherShard.applyIndexOperationOnReplica(
1,
otherShard.getOperationPrimaryTerm(),
@@ -2498,7 +2492,7 @@ public class IndexShardTests extends IndexShardTestCase {
1,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON)
new SourceToParse(indexName, "doc-0", new BytesArray("{}"), XContentType.JSON)
);
flushShard(shard);
shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here.
@@ -2508,7 +2502,7 @@ public class IndexShardTests extends IndexShardTestCase {
1,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON)
new SourceToParse(indexName, "doc-1", new BytesArray("{}"), XContentType.JSON)
);
flushShard(shard);
assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1"));
@@ -2520,7 +2514,7 @@ public class IndexShardTests extends IndexShardTestCase {
1,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON)
new SourceToParse(indexName, "doc-2", new BytesArray("{}"), XContentType.JSON)
);
flushShard(shard);
assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2"));
@@ -2708,7 +2702,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -2761,7 +2755,7 @@ public class IndexShardTests extends IndexShardTestCase {
public void testIndexingOperationListenersIsInvokedOnRecovery() throws IOException {
IndexShard shard = newStartedShard(true);
indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
deleteDoc(shard, "_doc", "0");
deleteDoc(shard, "0");
indexDoc(shard, "_doc", "1", "{\"foo\" : \"bar\"}");
shard.refresh("test");
@@ -2848,7 +2842,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -2893,7 +2887,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, randomLongBetween(1, Long.MAX_VALUE))
.build();
@@ -2905,7 +2899,6 @@ public class IndexShardTests extends IndexShardTestCase {
if (randomBoolean()) {
operations.add(
new Translog.Index(
"_doc",
"1",
0,
primary.getPendingPrimaryTerm(),
@@ -2919,7 +2912,6 @@ public class IndexShardTests extends IndexShardTestCase {
// corrupt entry
operations.add(
new Translog.Index(
"_doc",
"2",
1,
primary.getPendingPrimaryTerm(),
@@ -2978,7 +2970,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -3028,7 +3020,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -3101,7 +3093,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("source")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -3238,7 +3230,7 @@ public class IndexShardTests extends IndexShardTestCase {
);
for (final Integer i : ids) {
final String id = Integer.toString(i);
deleteDoc(indexShard, "_doc", id);
deleteDoc(indexShard, id);
indexDoc(indexShard, "_doc", id);
}
// Need to update and sync the global checkpoint and the retention leases for the soft-deletes retention MergePolicy.
@@ -3355,7 +3347,11 @@ public class IndexShardTests extends IndexShardTestCase {

// Do some updates and deletes, then recheck the correlation again.
for (int i = 0; i < numDoc / 2; i++) {
indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
if (randomBoolean()) {
deleteDoc(indexShard, Integer.toString(i));
} else {
indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
}
}
if (randomBoolean()) {
indexShard.flush(new FlushRequest());
@@ -3705,7 +3701,6 @@ public class IndexShardTests extends IndexShardTestCase {
}
SourceToParse sourceToParse = new SourceToParse(
indexShard.shardId().getIndexName(),
"_doc",
id,
new BytesArray("{}"),
XContentType.JSON
@@ -3742,7 +3737,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -3792,7 +3787,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -3866,7 +3861,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -3932,10 +3927,7 @@ public class IndexShardTests extends IndexShardTestCase {
public void testSupplyTombstoneDoc() throws Exception {
IndexShard shard = newStartedShard();
String id = randomRealisticUnicodeOfLengthBetween(1, 10);
ParsedDocument deleteTombstone = shard.getEngine()
.config()
.getTombstoneDocSupplier()
.newDeleteTombstoneDoc(MapperService.SINGLE_MAPPING_NAME, id);
ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc(id);
assertThat(deleteTombstone.docs(), hasSize(1));
ParseContext.Document deleteDoc = deleteTombstone.docs().get(0);
assertThat(
@@ -4166,14 +4158,13 @@ public class IndexShardTests extends IndexShardTestCase {
updateMappings(
shard,
IndexMetadata.builder(shard.indexSettings.getIndexMetadata())
.putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.build()
);
final List<Translog.Operation> operations = Stream.concat(
IntStream.range(0, randomIntBetween(0, 10))
.mapToObj(
n -> new Translog.Index(
"_doc",
"1",
0,
shard.getPendingPrimaryTerm(),
@@ -4187,7 +4178,6 @@ public class IndexShardTests extends IndexShardTestCase {
IntStream.range(0, randomIntBetween(1, 10))
.mapToObj(
n -> new Translog.Index(
"_doc",
"1",
0,
shard.getPendingPrimaryTerm(),
@@ -4297,7 +4287,7 @@ public class IndexShardTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("index")
.putMapping("some_type", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -4350,7 +4340,7 @@ public class IndexShardTests extends IndexShardTestCase {
1,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
false,
new SourceToParse(shard.shardId.getIndexName(), "_doc", Long.toString(i), new BytesArray("{}"), XContentType.JSON)
new SourceToParse(shard.shardId.getIndexName(), Long.toString(i), new BytesArray("{}"), XContentType.JSON)
);
shard.updateGlobalCheckpointOnReplica(shard.getLocalCheckpoint(), "test");
if (randomInt(100) < 10) {
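The IndexMetadata hunks above show the companion builder change: putMapping no longer takes a type name, only the mapping source. A minimal sketch of building test metadata the new way, using only the calls visible in these hunks; the import locations and the contents of the settings object are assumptions:

import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;

class IndexMetadataSketch {
    IndexMetadata example(Settings settings) {
        // settings is assumed to carry number_of_shards/number_of_replicas and the index-created
        // version, as in the surrounding tests; putMapping now takes just the mapping JSON.
        return IndexMetadata.builder("test")
            .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\" } } }")
            .settings(settings)
            .primaryTerm(0, 1)
            .build();
    }
}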
@@ -161,7 +161,7 @@ public class IndexingOperationListenerTests extends OpenSearchTestCase {
logger
);
ParsedDocument doc = InternalEngineTests.createParsedDoc("1", null);
Engine.Delete delete = new Engine.Delete("test", "1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong());
Engine.Delete delete = new Engine.Delete("1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong());
Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong(), doc);
compositeListener.postDelete(randomShardId, delete, new Engine.DeleteResult(1, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, true));
assertEquals(0, preIndex.get());

@@ -94,7 +94,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
shard.applyIndexOperationOnPrimary(
Versions.MATCH_ANY,
VersionType.INTERNAL,
new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
SequenceNumbers.UNASSIGNED_SEQ_NO,
0,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
@@ -176,7 +176,7 @@ public class PrimaryReplicaSyncerTests extends IndexShardTestCase {
shard.applyIndexOperationOnPrimary(
Versions.MATCH_ANY,
VersionType.INTERNAL,
new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
SequenceNumbers.UNASSIGNED_SEQ_NO,
0,
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,

@@ -440,17 +440,7 @@ public class RefreshListenersTests extends OpenSearchTestCase {
document.add(seqID.seqNoDocValue);
document.add(seqID.primaryTerm);
BytesReference source = new BytesArray(new byte[] { 1 });
ParsedDocument doc = new ParsedDocument(
versionField,
seqID,
id,
"test",
null,
Arrays.asList(document),
source,
XContentType.JSON,
null
);
ParsedDocument doc = new ParsedDocument(versionField, seqID, id, null, Arrays.asList(document), source, XContentType.JSON, null);
Engine.Index index = new Engine.Index(new Term("_id", doc.id()), engine.config().getPrimaryTermSupplier().getAsLong(), doc);
return engine.index(index);
}

@@ -142,7 +142,7 @@ public class RemoveCorruptedShardDataCommandTests extends IndexShardTestCase {
final IndexMetadata.Builder metadata = IndexMetadata.builder(routing.getIndexName())
.settings(settings)
.primaryTerm(0, randomIntBetween(1, 100))
.putMapping("_doc", "{ \"properties\": {} }");
.putMapping("{ \"properties\": {} }");
indexMetadata = metadata.build();

clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder().put(indexMetadata, false).build()).build();

@@ -59,7 +59,7 @@ public class ShardGetServiceTests extends IndexShardTestCase {
.build();
IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -74,7 +74,7 @@ public class ShardGetServiceTests extends IndexShardTestCase {
assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed
}

Engine.IndexResult test1 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
Engine.IndexResult test1 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
assertTrue(primary.getEngine().refreshNeeded());
GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM);
assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}");
@@ -89,7 +89,7 @@ public class ShardGetServiceTests extends IndexShardTestCase {
}

// now again from the reader
Engine.IndexResult test2 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
Engine.IndexResult test2 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
assertTrue(primary.getEngine().refreshNeeded());
testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM);
assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}");
@@ -134,7 +134,6 @@ public class ShardGetServiceTests extends IndexShardTestCase {

IndexMetadata metadata = IndexMetadata.builder("test")
.putMapping(
MapperService.SINGLE_MAPPING_NAME,
"{ \"properties\": { \"foo\": { \"type\": "
+ fieldType
+ ", \"store\": true }, "
@@ -158,7 +157,7 @@ public class ShardGetServiceTests extends IndexShardTestCase {
assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed
}

Engine.IndexResult test1 = indexDoc(primary, MapperService.SINGLE_MAPPING_NAME, "1", docToIndex, XContentType.JSON, "foobar");
Engine.IndexResult test1 = indexDoc(primary, "1", docToIndex, XContentType.JSON, "foobar");
assertTrue(primary.getEngine().refreshNeeded());
GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM);
assertEquals(new String(testGet1.source() == null ? new byte[0] : testGet1.source(), StandardCharsets.UTF_8), expectedResult);
@@ -172,7 +171,7 @@ public class ShardGetServiceTests extends IndexShardTestCase {
assertEquals(searcher.getIndexReader().maxDoc(), 2);
}

Engine.IndexResult test2 = indexDoc(primary, MapperService.SINGLE_MAPPING_NAME, "2", docToIndex, XContentType.JSON, "foobar");
Engine.IndexResult test2 = indexDoc(primary, "2", docToIndex, XContentType.JSON, "foobar");
assertTrue(primary.getEngine().refreshNeeded());
GetResult testGet2 = primary.getService()
.get("2", new String[] { "foo" }, true, 1, VersionType.INTERNAL, FetchSourceContext.FETCH_SOURCE);
@@ -204,7 +203,7 @@ public class ShardGetServiceTests extends IndexShardTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("index")
.putMapping("some_type", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
.settings(settings)
.primaryTerm(0, 1)
.build();
@@ -358,13 +358,13 @@ public class TranslogTests extends OpenSearchTestCase {
assertThat(snapshot, SnapshotMatchers.size(0));
}

addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
try (Translog.Snapshot snapshot = translog.newSnapshot()) {
assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
assertThat(snapshot.totalOperations(), equalTo(ops.size()));
}

addToTranslogAndList(translog, ops, new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2")));
addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get(), newUid("2")));
try (Translog.Snapshot snapshot = translog.newSnapshot()) {
assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
assertThat(snapshot.totalOperations(), equalTo(ops.size()));
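The translog operations themselves change shape the same way: Translog.Index and Translog.Delete lose their leading type argument. A short sketch of the post-change constructors, reusing the surrounding test's translog, primaryTerm, and newUid fixtures (the argument values are illustrative, the shapes come from the hunks above):

// id, seqNo, primaryTerm, source bytes — no type argument any more
translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
// id, seqNo, primaryTerm, uid term — no type argument any more
translog.add(new Translog.Delete("2", 1, primaryTerm.get(), newUid("2")));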
@@ -454,34 +454,34 @@ public class TranslogTests extends OpenSearchTestCase {
assertThat(stats.estimatedNumberOfOperations(), equalTo(0));
}
assertThat((int) firstOperationPosition, greaterThan(CodecUtil.headerLength(TranslogHeader.TRANSLOG_CODEC)));
translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));

{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(1));
assertThat(stats.getTranslogSizeInBytes(), equalTo(162L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(157L));
assertThat(stats.getUncommittedOperations(), equalTo(1));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(107L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(102L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

translog.add(new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2")));
translog.add(new Translog.Delete("2", 1, primaryTerm.get(), newUid("2")));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(2));
assertThat(stats.getTranslogSizeInBytes(), equalTo(210L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(200L));
assertThat(stats.getUncommittedOperations(), equalTo(2));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(155L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(145L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

translog.add(new Translog.Delete("test", "3", 2, primaryTerm.get(), newUid("3")));
translog.add(new Translog.Delete("3", 2, primaryTerm.get(), newUid("3")));
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(3));
assertThat(stats.getTranslogSizeInBytes(), equalTo(258L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(243L));
assertThat(stats.getUncommittedOperations(), equalTo(3));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(203L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(188L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

@@ -489,9 +489,9 @@ public class TranslogTests extends OpenSearchTestCase {
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
assertThat(stats.getTranslogSizeInBytes(), equalTo(300L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(285L));
assertThat(stats.getUncommittedOperations(), equalTo(4));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(245L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(230L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

@@ -499,9 +499,9 @@ public class TranslogTests extends OpenSearchTestCase {
{
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
assertThat(stats.getTranslogSizeInBytes(), equalTo(355L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(340L));
assertThat(stats.getUncommittedOperations(), equalTo(4));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(300L));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(285L));
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
}

@@ -511,7 +511,7 @@ public class TranslogTests extends OpenSearchTestCase {
stats.writeTo(out);
final TranslogStats copy = new TranslogStats(out.bytes().streamInput());
assertThat(copy.estimatedNumberOfOperations(), equalTo(4));
assertThat(copy.getTranslogSizeInBytes(), equalTo(355L));
assertThat(copy.getTranslogSizeInBytes(), equalTo(340L));

try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.startObject();
@@ -521,9 +521,9 @@ public class TranslogTests extends OpenSearchTestCase {
Strings.toString(builder),
equalTo(
"{\"translog\":{\"operations\":4,\"size_in_bytes\":"
+ 355
+ 340
+ ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":"
+ 300
+ 285
+ ",\"earliest_last_modified_age\":"
+ stats.getEarliestLastModifiedAge()
+ "}}"
@@ -537,7 +537,7 @@ public class TranslogTests extends OpenSearchTestCase {
long lastModifiedAge = System.currentTimeMillis() - translog.getCurrent().getLastModifiedTime();
final TranslogStats stats = stats();
assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
assertThat(stats.getTranslogSizeInBytes(), equalTo(355L));
assertThat(stats.getTranslogSizeInBytes(), equalTo(340L));
assertThat(stats.getUncommittedOperations(), equalTo(0));
assertThat(stats.getUncommittedSizeInBytes(), equalTo(firstOperationPosition));
assertThat(stats.getEarliestLastModifiedAge(), greaterThanOrEqualTo(lastModifiedAge));
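The expected size assertions above shrink accordingly: for the first three typed operations the totals drop by 5, 10, and 15 bytes (162 to 157, 210 to 200, 258 to 243, and likewise for the uncommitted sizes), which is consistent with each Index or Delete entry no longer serializing the four-character type string "test" plus its length byte; the later 355-to-340 assertions carry the same 15-byte difference forward.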
@@ -553,7 +553,7 @@ public class TranslogTests extends OpenSearchTestCase {
int uncommittedOps = 0;
int operationsInLastGen = 0;
for (int i = 0; i < operations; i++) {
translog.add(new Translog.Index("test", Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 }));
translog.add(new Translog.Index(Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 }));
uncommittedOps++;
operationsInLastGen++;
if (rarely()) {
@@ -634,7 +634,7 @@ public class TranslogTests extends OpenSearchTestCase {
assertThat(snapshot, SnapshotMatchers.size(0));
}

addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));

try (Translog.Snapshot snapshot = translog.newSnapshot(0, Long.MAX_VALUE)) {
assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
@@ -656,9 +656,9 @@ public class TranslogTests extends OpenSearchTestCase {
public void testReadLocation() throws IOException {
ArrayList<Translog.Operation> ops = new ArrayList<>();
ArrayList<Translog.Location> locs = new ArrayList<>();
locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })));
locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 1 })));
locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 1 })));
locs.add(addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })));
locs.add(addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 1 })));
locs.add(addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 1 })));
int i = 0;
for (Translog.Operation op : ops) {
assertEquals(op, translog.readOperation(locs.get(i++)));
@@ -674,16 +674,16 @@ public class TranslogTests extends OpenSearchTestCase {
toClose.add(snapshot);
assertThat(snapshot, SnapshotMatchers.size(0));

addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
Translog.Snapshot snapshot1 = translog.newSnapshot();
toClose.add(snapshot1);

addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 }));
addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 }));

assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0)));

translog.rollGeneration();
addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 }));
addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 }));

Translog.Snapshot snapshot2 = translog.newSnapshot();
toClose.add(snapshot2);
@@ -697,7 +697,7 @@ public class TranslogTests extends OpenSearchTestCase {

public void testSnapshotOnClosedTranslog() throws IOException {
assertTrue(Files.exists(translogDir.resolve(Translog.getFilename(1))));
translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
translog.close();
AlreadyClosedException ex = expectThrows(AlreadyClosedException.class, () -> translog.newSnapshot());
assertEquals(ex.getMessage(), "translog is already closed");
@@ -719,13 +719,7 @@ public class TranslogTests extends OpenSearchTestCase {
}
List<Translog.Operation> ops = new ArrayList<>(seqNos.size());
for (long seqNo : seqNos) {
Translog.Index op = new Translog.Index(
"_doc",
randomAlphaOfLength(10),
seqNo,
primaryTerm.get(),
new byte[] { randomByte() }
);
Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() });
translog.add(op);
ops.add(op);
}
@@ -786,13 +780,7 @@ public class TranslogTests extends OpenSearchTestCase {
Collections.shuffle(seqNos, new Random(100));
List<Translog.Operation> ops = new ArrayList<>(seqNos.size());
for (long seqNo : seqNos) {
Translog.Index op = new Translog.Index(
"_doc",
randomAlphaOfLength(10),
seqNo,
primaryTerm.get(),
new byte[] { randomByte() }
);
Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() });
boolean shouldAdd = !withMissingOps || seqNo % 4 != 0;
if (shouldAdd) {
translog.add(op);
@@ -928,7 +916,6 @@ public class TranslogTests extends OpenSearchTestCase {
Translog.Index expIndexOp = (Translog.Index) expectedOp;
assertEquals(expIndexOp.id(), indexOp.id());
assertEquals(expIndexOp.routing(), indexOp.routing());
assertEquals(expIndexOp.type(), indexOp.type());
assertEquals(expIndexOp.source(), indexOp.source());
assertEquals(expIndexOp.version(), indexOp.version());
break;
@@ -962,7 +949,7 @@ public class TranslogTests extends OpenSearchTestCase {
int translogOperations = randomIntBetween(10, 1000);
for (int op = 0; op < translogOperations; op++) {
String ascii = randomAlphaOfLengthBetween(1, 50);
locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));

if (rarely()) {
translog.rollGeneration();
@@ -989,7 +976,7 @@ public class TranslogTests extends OpenSearchTestCase {
int translogOperations = randomIntBetween(10, 100);
for (int op = 0; op < translogOperations; op++) {
String ascii = randomAlphaOfLengthBetween(1, 50);
locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
}
translog.sync();

@@ -1034,7 +1021,7 @@ public class TranslogTests extends OpenSearchTestCase {

public void testVerifyTranslogIsNotDeleted() throws IOException {
assertFileIsPresent(translog, 1);
translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
try (Translog.Snapshot snapshot = translog.newSnapshot()) {
assertThat(snapshot, SnapshotMatchers.size(1));
assertFileIsPresent(translog, 1);
@@ -1086,10 +1073,10 @@ public class TranslogTests extends OpenSearchTestCase {
switch (type) {
case CREATE:
case INDEX:
op = new Translog.Index("type", "" + id, id, primaryTerm.get(), new byte[] { (byte) id });
op = new Translog.Index("" + id, id, primaryTerm.get(), new byte[] { (byte) id });
break;
case DELETE:
op = new Translog.Delete("test", Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id)));
op = new Translog.Delete(Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id)));
break;
case NO_OP:
op = new Translog.NoOp(id, 1, Long.toString(id));
@@ -1248,7 +1235,7 @@ public class TranslogTests extends OpenSearchTestCase {
for (int op = 0; op < translogOperations; op++) {
int seqNo = ++count;
final Translog.Location location = translog.add(
new Translog.Index("test", "" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")))
new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")))
);
if (randomBoolean()) {
assertTrue("at least one operation pending", translog.syncNeeded());
@@ -1257,13 +1244,7 @@ public class TranslogTests extends OpenSearchTestCase {
assertFalse("the last call to ensureSycned synced all previous ops", translog.syncNeeded());
seqNo = ++count;
translog.add(
new Translog.Index(
"test",
"" + op,
seqNo,
primaryTerm.get(),
Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))
)
new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")))
);
assertTrue("one pending operation", translog.syncNeeded());
assertFalse("this op has been synced before", translog.ensureSynced(location)); // not syncing now
@@ -1293,7 +1274,7 @@ public class TranslogTests extends OpenSearchTestCase {
translog.rollGeneration();
}
final Translog.Location location = translog.add(
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
);
locations.add(location);
}
@@ -1325,7 +1306,7 @@ public class TranslogTests extends OpenSearchTestCase {
for (int op = 0; op < translogOperations; op++) {
locations.add(
translog.add(
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
)
);
if (rarely() && translogOperations > op + 1) {
@@ -1364,9 +1345,7 @@ public class TranslogTests extends OpenSearchTestCase {
long lastSyncedGlobalCheckpoint = globalCheckpoint.get();
for (int op = 0; op < translogOperations; op++) {
locations.add(
translog.add(
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
)
translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
);
if (randomBoolean()) {
globalCheckpoint.set(globalCheckpoint.get() + randomIntBetween(1, 16));
@@ -1380,7 +1359,6 @@ public class TranslogTests extends OpenSearchTestCase {
assertEquals(translogOperations, translog.totalOperations());
translog.add(
new Translog.Index(
"test",
"" + translogOperations,
translogOperations,
primaryTerm.get(),
@@ -1730,9 +1708,7 @@ public class TranslogTests extends OpenSearchTestCase {
final boolean commitOften = randomBoolean();
for (int op = 0; op < translogOperations; op++) {
locations.add(
translog.add(
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
)
translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
);
final boolean commit = commitOften ? frequently() : rarely();
if (commit && op < translogOperations - 1) {
@@ -1791,9 +1767,7 @@ public class TranslogTests extends OpenSearchTestCase {
final boolean sync = randomBoolean();
for (int op = 0; op < translogOperations; op++) {
locations.add(
translog.add(
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
)
translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
);
if (op == prepareOp) {
translogGeneration = translog.getGeneration();
@@ -1878,9 +1852,7 @@ public class TranslogTests extends OpenSearchTestCase {
final boolean sync = randomBoolean();
for (int op = 0; op < translogOperations; op++) {
locations.add(
translog.add(
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
)
translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
);
if (op == prepareOp) {
translogGeneration = translog.getGeneration();
@@ -1968,7 +1940,7 @@ public class TranslogTests extends OpenSearchTestCase {
Translog.TranslogGeneration translogGeneration = null;
final boolean sync = randomBoolean();
for (int op = 0; op < translogOperations; op++) {
translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8)));
translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8)));
if (op == prepareOp) {
translogGeneration = translog.getGeneration();
translog.rollGeneration();
@ -2003,7 +1975,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
assertThat(
|
||||
translogCorruptedException.getMessage(),
|
||||
endsWith(
|
||||
"] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=3025, "
|
||||
"] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=2750, "
|
||||
+ "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} "
|
||||
+ "but got Checkpoint{offset=0, numOps=0, generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, "
|
||||
+ "minTranslogGeneration=0, trimmedAboveSeqNo=-2}"
|
||||
|
@ -2050,7 +2022,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
int translogOperations = randomIntBetween(10, 100);
|
||||
for (int op = 0; op < translogOperations; op++) {
|
||||
Translog.Index test = new Translog.Index(
|
||||
"test",
|
||||
"" + op,
|
||||
op,
|
||||
primaryTerm.get(),
|
||||
|
@ -2073,7 +2044,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
|
||||
for (int op = 0; op < extraDocs; op++) {
|
||||
String ascii = randomAlphaOfLengthBetween(1, 50);
|
||||
Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8"));
|
||||
Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8"));
|
||||
translog.add(operation);
|
||||
}
|
||||
|
||||
|
@ -2093,13 +2064,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
translog.rollGeneration();
|
||||
|
||||
// add a single operation to current with seq# > trimmed seq# but higher primary term
|
||||
Translog.Index operation = new Translog.Index(
|
||||
"test",
|
||||
"" + 1,
|
||||
1L,
|
||||
primaryTerm.get(),
|
||||
randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8")
|
||||
);
|
||||
Translog.Index operation = new Translog.Index("" + 1, 1L, primaryTerm.get(), randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8"));
|
||||
translog.add(operation);
|
||||
|
||||
// it is possible to trim after generation rollover
|
||||
|
@ -2129,7 +2094,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
// use ongoing primaryTerms - or the same as it was
|
||||
Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8"));
|
||||
Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8"));
|
||||
translog.add(operation);
|
||||
inMemoryTranslog.add(operation);
|
||||
allOperations.add(operation);
|
||||
|
@ -2213,7 +2178,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
Randomness.shuffle(ops);
|
||||
for (int op : ops) {
|
||||
String ascii = randomAlphaOfLengthBetween(1, 50);
|
||||
Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"));
|
||||
Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"));
|
||||
|
||||
failableTLog.add(operation);
|
||||
}
|
||||
|
@ -2271,12 +2236,12 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
for (int op = 0; op < translogOperations; op++) {
|
||||
locations.add(
|
||||
translog.add(
|
||||
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
)
|
||||
);
|
||||
locations2.add(
|
||||
translog2.add(
|
||||
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
)
|
||||
);
|
||||
}
|
||||
|
@ -2305,9 +2270,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
int firstUncommitted = 0;
|
||||
for (int op = 0; op < translogOperations; op++) {
|
||||
locations.add(
|
||||
translog.add(
|
||||
new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
)
|
||||
translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
|
||||
);
|
||||
if (randomBoolean()) {
|
||||
translog.rollGeneration();
|
||||
|
@ -2355,10 +2318,10 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
public void testFailOnClosedWrite() throws IOException {
|
||||
translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.close();
|
||||
try {
|
||||
translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
fail("closed");
|
||||
} catch (AlreadyClosedException ex) {
|
||||
// all is well
|
||||
|
@ -2442,7 +2405,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
case CREATE:
|
||||
case INDEX:
|
||||
op = new Translog.Index(
|
||||
"test",
|
||||
threadId + "_" + opCount,
|
||||
seqNoGenerator.getAndIncrement(),
|
||||
primaryTerm.get(),
|
||||
|
@ -2451,7 +2413,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
break;
|
||||
case DELETE:
|
||||
op = new Translog.Delete(
|
||||
"test",
|
||||
threadId + "_" + opCount,
|
||||
new Term("_uid", threadId + "_" + opCount),
|
||||
seqNoGenerator.getAndIncrement(),
|
||||
|
@ -2499,7 +2460,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
locations.add(
|
||||
translog.add(
|
||||
new Translog.Index(
|
||||
"test",
|
||||
"" + opsSynced,
|
||||
opsSynced,
|
||||
primaryTerm.get(),
|
||||
|
@ -2529,7 +2489,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
locations.add(
|
||||
translog.add(
|
||||
new Translog.Index(
|
||||
"test",
|
||||
"" + opsSynced,
|
||||
opsSynced,
|
||||
primaryTerm.get(),
|
||||
|
@ -2611,7 +2570,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
locations.add(
|
||||
translog.add(
|
||||
new Translog.Index(
|
||||
"test",
|
||||
"" + opsAdded,
|
||||
opsAdded,
|
||||
primaryTerm.get(),
|
||||
|
@ -2640,13 +2598,11 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
TranslogConfig config = getTranslogConfig(tempDir);
|
||||
Translog translog = getFailableTranslog(fail, config, false, true, null, createTranslogDeletionPolicy());
|
||||
LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer borders regularly
|
||||
translog.add(
|
||||
new Translog.Index("test", "1", 0, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))
|
||||
);
|
||||
translog.add(new Translog.Index("1", 0, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))));
|
||||
fail.failAlways();
|
||||
try {
|
||||
Translog.Location location = translog.add(
|
||||
new Translog.Index("test", "2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))
|
||||
new Translog.Index("2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))
|
||||
);
|
||||
if (randomBoolean()) {
|
||||
translog.ensureSynced(location);
|
||||
|
@ -2772,13 +2728,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
int op = 0;
|
||||
for (; op < translogOperations / 2; op++) {
|
||||
translog.add(
|
||||
new Translog.Index(
|
||||
"_doc",
|
||||
Integer.toString(op),
|
||||
op,
|
||||
primaryTerm.get(),
|
||||
Integer.toString(op).getBytes(Charset.forName("UTF-8"))
|
||||
)
|
||||
new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
);
|
||||
if (rarely()) {
|
||||
translog.rollGeneration();
|
||||
|
@ -2788,13 +2738,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
long localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op);
|
||||
for (op = translogOperations / 2; op < translogOperations; op++) {
|
||||
translog.add(
|
||||
new Translog.Index(
|
||||
"test",
|
||||
Integer.toString(op),
|
||||
op,
|
||||
primaryTerm.get(),
|
||||
Integer.toString(op).getBytes(Charset.forName("UTF-8"))
|
||||
)
|
||||
new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
);
|
||||
if (rarely()) {
|
||||
translog.rollGeneration();
|
||||
|
@ -2847,13 +2791,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
int op = 0;
|
||||
for (; op < translogOperations / 2; op++) {
|
||||
translog.add(
|
||||
new Translog.Index(
|
||||
"test",
|
||||
Integer.toString(op),
|
||||
op,
|
||||
primaryTerm.get(),
|
||||
Integer.toString(op).getBytes(Charset.forName("UTF-8"))
|
||||
)
|
||||
new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
);
|
||||
if (rarely()) {
|
||||
translog.rollGeneration();
|
||||
|
@ -2863,13 +2801,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op);
|
||||
for (op = translogOperations / 2; op < translogOperations; op++) {
|
||||
translog.add(
|
||||
new Translog.Index(
|
||||
"test",
|
||||
Integer.toString(op),
|
||||
op,
|
||||
primaryTerm.get(),
|
||||
Integer.toString(op).getBytes(Charset.forName("UTF-8"))
|
||||
)
|
||||
new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
|
||||
);
|
||||
if (rarely()) {
|
||||
translog.rollGeneration();
|
||||
|
@ -3132,7 +3064,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
Path tempDir = createTempDir();
|
||||
TranslogConfig config = getTranslogConfig(tempDir);
|
||||
Translog translog = createTranslog(config);
|
||||
translog.add(new Translog.Index("test", "boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8"))));
|
||||
translog.add(new Translog.Index("boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8"))));
|
||||
translog.close();
|
||||
try {
|
||||
new Translog(
|
||||
|
@ -3161,7 +3093,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
public void testRecoverWithUnbackedNextGen() throws IOException {
|
||||
translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.close();
|
||||
TranslogConfig config = translog.getConfig();
|
||||
|
||||
|
@ -3176,7 +3108,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
assertNotNull("operation 1 must be non-null", op);
|
||||
assertEquals("payload mismatch for operation 1", 1, Integer.parseInt(op.getSource().source.utf8ToString()));
|
||||
|
||||
tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(2).getBytes(Charset.forName("UTF-8"))));
|
||||
tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), Integer.toString(2).getBytes(Charset.forName("UTF-8"))));
|
||||
}
|
||||
|
||||
try (Translog tlog = openTranslog(config, translog.getTranslogUUID()); Translog.Snapshot snapshot = tlog.newSnapshot()) {
|
||||
|
@ -3193,7 +3125,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException {
|
||||
translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.close();
|
||||
TranslogConfig config = translog.getConfig();
|
||||
Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME);
|
||||
|
@ -3217,7 +3149,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException {
|
||||
translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
|
||||
translog.close();
|
||||
TranslogConfig config = translog.getConfig();
|
||||
final String translogUUID = translog.getTranslogUUID();
|
||||
|
@ -3247,7 +3179,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString()));
|
||||
}
|
||||
}
|
||||
tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
|
||||
}
|
||||
|
||||
TranslogException ex = expectThrows(
|
||||
|
@ -3293,7 +3225,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
for (int opsAdded = 0; opsAdded < numOps; opsAdded++) {
|
||||
String doc = lineFileDocs.nextDoc().toString();
|
||||
failableTLog.add(
|
||||
new Translog.Index("test", "" + opsAdded, opsAdded, primaryTerm.get(), doc.getBytes(Charset.forName("UTF-8")))
|
||||
new Translog.Index("" + opsAdded, opsAdded, primaryTerm.get(), doc.getBytes(Charset.forName("UTF-8")))
|
||||
);
|
||||
unsynced.add(doc);
|
||||
if (randomBoolean()) {
|
||||
|
@ -3464,7 +3396,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
* Tests that closing views after the translog is fine and we can reopen the translog
|
||||
*/
|
||||
public void testPendingDelete() throws IOException {
|
||||
translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
|
||||
translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
|
||||
translog.rollGeneration();
|
||||
TranslogConfig config = translog.getConfig();
|
||||
final String translogUUID = translog.getTranslogUUID();
|
||||
|
@ -3478,10 +3410,10 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
primaryTerm::get,
|
||||
seqNo -> {}
|
||||
);
|
||||
translog.add(new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 }));
|
||||
translog.add(new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 }));
|
||||
translog.rollGeneration();
|
||||
Closeable lock = translog.acquireRetentionLock();
|
||||
translog.add(new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 }));
|
||||
translog.add(new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 }));
|
||||
translog.close();
|
||||
IOUtils.close(lock);
|
||||
translog = new Translog(
|
||||
|
@ -3515,17 +3447,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
document.add(seqID.seqNo);
|
||||
document.add(seqID.seqNoDocValue);
|
||||
document.add(seqID.primaryTerm);
|
||||
ParsedDocument doc = new ParsedDocument(
|
||||
versionField,
|
||||
seqID,
|
||||
"1",
|
||||
"type",
|
||||
null,
|
||||
Arrays.asList(document),
|
||||
B_1,
|
||||
XContentType.JSON,
|
||||
null
|
||||
);
|
||||
ParsedDocument doc = new ParsedDocument(versionField, seqID, "1", null, Arrays.asList(document), B_1, XContentType.JSON, null);
|
||||
|
||||
Engine.Index eIndex = new Engine.Index(
|
||||
newUid(doc),
|
||||
|
@ -3554,7 +3476,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
assertEquals(index, serializedIndex);
|
||||
|
||||
Engine.Delete eDelete = new Engine.Delete(
|
||||
doc.type(),
|
||||
doc.id(),
|
||||
newUid(doc),
|
||||
randomSeqNum,
|
||||
|
@ -3793,7 +3714,6 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
final int operations = randomIntBetween(1, 100);
|
||||
for (int i = 0; i < operations; i++) {
|
||||
Translog.Index op = new Translog.Index(
|
||||
"doc",
|
||||
randomAlphaOfLength(10),
|
||||
seqNo.getAndIncrement(),
|
||||
primaryTerm.get(),
|
||||
|
@ -3823,7 +3743,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
List<Long> batch = LongStream.rangeClosed(0, between(0, 500)).boxed().collect(Collectors.toList());
|
||||
Randomness.shuffle(batch);
|
||||
for (Long seqNo : batch) {
|
||||
Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 });
|
||||
Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 });
|
||||
translog.add(op);
|
||||
latestOperations.put(op.seqNo(), op);
|
||||
}
|
||||
|
@ -3838,7 +3758,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
public void testCloseSnapshotTwice() throws Exception {
|
||||
int numOps = between(0, 10);
|
||||
for (int i = 0; i < numOps; i++) {
|
||||
Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 });
|
||||
Translog.Index op = new Translog.Index(randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 });
|
||||
translog.add(op);
|
||||
if (randomBoolean()) {
|
||||
translog.rollGeneration();
|
||||
|
@ -3912,7 +3832,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
Randomness.shuffle(seqNos);
|
||||
for (long seqNo : seqNos) {
|
||||
if (frequently()) {
|
||||
translog.add(new Translog.Index("test", "id", seqNo, primaryTerm.get(), new byte[] { 1 }));
|
||||
translog.add(new Translog.Index("id", seqNo, primaryTerm.get(), new byte[] { 1 }));
|
||||
maxSeqNoPerGeneration.compute(
|
||||
translog.currentFileGeneration(),
|
||||
(key, existing) -> existing == null ? seqNo : Math.max(existing, seqNo)
|
||||
|
@ -4050,9 +3970,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
int iterations = randomIntBetween(10, 100);
|
||||
for (int i = 0; i < iterations; i++) {
|
||||
List<Translog.Operation> ops = IntStream.range(0, between(1, 10))
|
||||
.mapToObj(
|
||||
n -> new Translog.Index("test", "1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 })
|
||||
)
|
||||
.mapToObj(n -> new Translog.Index("1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 }))
|
||||
.collect(Collectors.toList());
|
||||
try {
|
||||
Translog.Location location = null;
|
||||
|
@ -4134,7 +4052,7 @@ public class TranslogTests extends OpenSearchTestCase {
|
|||
}
|
||||
};
|
||||
try {
|
||||
translog.add(new Translog.Index("1", "_doc", 1, primaryTerm.get(), new byte[] { 1 }));
|
||||
translog.add(new Translog.Index("1", 1, primaryTerm.get(), new byte[] { 1 }));
|
||||
failedToSyncCheckpoint.set(true);
|
||||
expectThrows(IOException.class, translog::rollGeneration);
|
||||
final AlreadyClosedException alreadyClosedException = expectThrows(AlreadyClosedException.class, translog::rollGeneration);
|
||||
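A minimal sketch (not part of this commit) of the call shape the TranslogTests hunks above converge on: Translog.Index now takes (id, seqNo, primaryTerm, source), with no document type. It assumes an open org.opensearch.index.translog.Translog and a primary term value, as the test fixture provides.

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.opensearch.index.translog.Translog;

// Hypothetical helper, not from the commit: adds `count` operations with the typeless
// constructor and rolls the generation, mirroring the updated test flow above.
final class TranslogOpSketch {
    static void addOpsWithoutTypes(Translog translog, long primaryTerm, int count) throws IOException {
        for (int op = 0; op < count; op++) {
            byte[] source = Integer.toString(op).getBytes(StandardCharsets.UTF_8);
            // id, seqNo, primaryTerm, source -- the type argument is gone
            Translog.Location location = translog.add(new Translog.Index("" + op, op, primaryTerm, source));
            assert location != null;
        }
        translog.rollGeneration(); // unchanged bookkeeping; only the operation shape changed
    }
}

Translog.Delete loses its type argument in the same way in the hunks above.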
|
|
|
@ -367,7 +367,7 @@ public class IndexingMemoryControllerTests extends IndexShardTestCase {
|
|||
public void testTranslogRecoveryWorksWithIMC() throws IOException {
|
||||
IndexShard shard = newStartedShard(true);
|
||||
for (int i = 0; i < 100; i++) {
|
||||
indexDoc(shard, "_doc", Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null);
|
||||
indexDoc(shard, Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null);
|
||||
}
|
||||
shard.close("simon says", false);
|
||||
AtomicReference<IndexShard> shardRef = new AtomicReference<>();
|
||||
|
|
|
@ -185,7 +185,7 @@ public class PeerRecoveryTargetServiceTests extends IndexShardTestCase {
|
|||
shard.getOperationPrimaryTerm(),
|
||||
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
|
||||
false,
|
||||
new SourceToParse(shard.shardId().getIndexName(), "_doc", UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON)
|
||||
new SourceToParse(shard.shardId().getIndexName(), UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON)
|
||||
);
|
||||
if (randomInt(100) < 5) {
|
||||
shard.flush(new FlushRequest().waitIfOngoing(true));
|
||||
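The hunk above also shows the narrowed SourceToParse constructor. A sketch of the two shapes used throughout these tests, with and without a routing value; the import locations are assumed from the code base of this era and the snippet is illustrative only.

import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.mapper.SourceToParse;

// Both shapes appear in the updated tests: (index, id, source, xContentType)
// and (index, id, source, xContentType, routing); the type argument is gone.
final class SourceToParseSketch {
    static SourceToParse plain() {
        return new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON);
    }

    static SourceToParse routed() {
        return new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON, "a-routing-value");
    }
}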
|
|
|
@ -462,7 +462,6 @@ public class RecoverySourceHandlerTests extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
private Engine.Index getIndex(final String id) {
|
||||
final String type = "test";
|
||||
final ParseContext.Document document = new ParseContext.Document();
|
||||
document.add(new TextField("test", "test", Field.Store.YES));
|
||||
final Field idField = new Field("_id", Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE);
|
||||
|
@ -478,7 +477,6 @@ public class RecoverySourceHandlerTests extends OpenSearchTestCase {
|
|||
versionField,
|
||||
seqID,
|
||||
id,
|
||||
type,
|
||||
null,
|
||||
Arrays.asList(document),
|
||||
source,
|
||||
|
@ -1188,10 +1186,9 @@ public class RecoverySourceHandlerTests extends OpenSearchTestCase {
|
|||
final long seqNo = randomValueOtherThanMany(n -> seqNos.add(n) == false, OpenSearchTestCase::randomNonNegativeLong);
|
||||
final Translog.Operation op;
|
||||
if (randomBoolean()) {
|
||||
op = new Translog.Index("_doc", "id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1);
|
||||
op = new Translog.Index("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1);
|
||||
} else if (randomBoolean()) {
|
||||
op = new Translog.Delete(
|
||||
"_doc",
|
||||
"id",
|
||||
new Term("_id", Uid.encodeId("id")),
|
||||
seqNo,
|
||||
|
|
|
@ -161,7 +161,7 @@ public class RecoveryTests extends OpenSearchIndexLevelReplicationTestCase {
|
|||
|
||||
// delete #1
|
||||
orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete
|
||||
orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "type", "id");
|
||||
orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id");
|
||||
orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment
|
||||
// index #0
|
||||
orgReplica.applyIndexOperationOnReplica(
|
||||
|
@ -170,7 +170,7 @@ public class RecoveryTests extends OpenSearchIndexLevelReplicationTestCase {
|
|||
1,
|
||||
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
|
||||
false,
|
||||
new SourceToParse(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON)
|
||||
new SourceToParse(indexName, "id", new BytesArray("{}"), XContentType.JSON)
|
||||
);
|
||||
// index #3
|
||||
orgReplica.applyIndexOperationOnReplica(
|
||||
|
@ -179,7 +179,7 @@ public class RecoveryTests extends OpenSearchIndexLevelReplicationTestCase {
|
|||
1,
|
||||
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
|
||||
false,
|
||||
new SourceToParse(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON)
|
||||
new SourceToParse(indexName, "id-3", new BytesArray("{}"), XContentType.JSON)
|
||||
);
|
||||
// Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1.
|
||||
orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true));
|
||||
|
@ -190,7 +190,7 @@ public class RecoveryTests extends OpenSearchIndexLevelReplicationTestCase {
|
|||
1,
|
||||
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
|
||||
false,
|
||||
new SourceToParse(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON)
|
||||
new SourceToParse(indexName, "id-2", new BytesArray("{}"), XContentType.JSON)
|
||||
);
|
||||
orgReplica.sync(); // advance local checkpoint
|
||||
orgReplica.updateGlobalCheckpointOnReplica(3L, "test");
|
||||
|
@ -201,7 +201,7 @@ public class RecoveryTests extends OpenSearchIndexLevelReplicationTestCase {
|
|||
1,
|
||||
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
|
||||
false,
|
||||
new SourceToParse(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)
|
||||
new SourceToParse(indexName, "id-5", new BytesArray("{}"), XContentType.JSON)
|
||||
);
|
||||
|
||||
if (randomBoolean()) {
|
||||
|
@ -310,13 +310,7 @@ public class RecoveryTests extends OpenSearchIndexLevelReplicationTestCase {
|
|||
Engine.IndexResult result = primaryShard.applyIndexOperationOnPrimary(
|
||||
Versions.MATCH_ANY,
|
||||
VersionType.INTERNAL,
|
||||
new SourceToParse(
|
||||
primaryShard.shardId().getIndexName(),
|
||||
"_doc",
|
||||
Integer.toString(i),
|
||||
new BytesArray("{}"),
|
||||
XContentType.JSON
|
||||
),
|
||||
new SourceToParse(primaryShard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
|
||||
SequenceNumbers.UNASSIGNED_SEQ_NO,
|
||||
0,
|
||||
IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
|
||||
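The RecoveryTests hunks above drive the replica-side apply methods with ids only. A hedged sketch of that flow, assuming an IndexShard replica like the one these tests recover; not code from the commit.

import java.io.IOException;

import org.opensearch.index.engine.Engine;
import org.opensearch.index.shard.IndexShard;

// Hypothetical helper: replica-side delete now takes (seqNo, primaryTerm, version, id),
// matching the updated calls above; the removed lines carried an extra type argument.
final class ReplicaDeleteSketch {
    static Engine.DeleteResult deleteOnReplica(IndexShard replica, String id) throws IOException {
        long seqNo = replica.seqNoStats().getMaxSeqNo() + 1;
        replica.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates
        Engine.DeleteResult result = replica.applyDeleteOperationOnReplica(seqNo, replica.getOperationPrimaryTerm(), 0L, id);
        replica.sync(); // advance the local checkpoint, as the updated helpers do
        return result;
    }
}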
|
|
|
@ -100,7 +100,6 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument parsedDocument = defaultMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -153,7 +152,6 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument parsedDocument = defaultMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -201,7 +199,6 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument parsedDocument = defaultMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -249,7 +246,6 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument parsedDocument = defaultMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -307,7 +303,7 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))
|
||||
() -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON))
|
||||
);
|
||||
assertEquals(
|
||||
"contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]",
|
||||
|
@ -341,7 +337,6 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument parsedDocument = defaultMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -387,7 +382,6 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
ParsedDocument parsedDocument = defaultMapper.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
"type1",
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -441,7 +435,7 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
|
||||
Exception e = expectThrows(
|
||||
MapperParsingException.class,
|
||||
() -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))
|
||||
() -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON))
|
||||
);
|
||||
assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]", e.getCause().getMessage());
|
||||
}
|
||||
|
@ -486,7 +480,7 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject();
|
||||
ParsedDocument parsedDocument = defaultMapper.parse(
|
||||
new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)
|
||||
new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)
|
||||
);
|
||||
IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
|
||||
assertContextSuggestFields(fields, 3);
|
||||
|
|
|
@ -83,7 +83,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -131,7 +130,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -177,7 +175,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.parse(
|
||||
new SourceToParse(
|
||||
"test",
|
||||
MapperService.SINGLE_MAPPING_NAME,
|
||||
"1",
|
||||
BytesReference.bytes(
|
||||
jsonBuilder().startObject()
|
||||
|
@ -240,7 +237,7 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase {
|
|||
.endArray()
|
||||
.endObject();
|
||||
ParsedDocument parsedDocument = mapperService.documentMapper()
|
||||
.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(builder), XContentType.JSON));
|
||||
.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON));
|
||||
IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
|
||||
assertContextSuggestFields(fields, 3);
|
||||
}
|
||||
|
|
|
@ -175,10 +175,6 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
// A default primary term is used by engine instances created in this test.
|
||||
protected final PrimaryTermSupplier primaryTerm = new PrimaryTermSupplier(1L);
|
||||
|
||||
protected static void assertVisibleCount(Engine engine, int numDocs) throws IOException {
|
||||
assertVisibleCount(engine, numDocs, true);
|
||||
}
|
||||
|
||||
protected static void assertVisibleCount(Engine engine, int numDocs, boolean refresh) throws IOException {
|
||||
if (refresh) {
|
||||
engine.refresh("test");
|
||||
|
@ -333,14 +329,14 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
try {
|
||||
if (engine != null && engine.isClosed.get() == false) {
|
||||
engine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
|
||||
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test"));
|
||||
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService());
|
||||
assertNoInFlightDocuments(engine);
|
||||
assertMaxSeqNoInCommitUserData(engine);
|
||||
assertAtMostOneLuceneDocumentPerSequenceNumber(engine);
|
||||
}
|
||||
if (replicaEngine != null && replicaEngine.isClosed.get() == false) {
|
||||
replicaEngine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
|
||||
assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService("test"));
|
||||
assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService());
|
||||
assertNoInFlightDocuments(replicaEngine);
|
||||
assertMaxSeqNoInCommitUserData(replicaEngine);
|
||||
assertAtMostOneLuceneDocumentPerSequenceNumber(replicaEngine);
|
||||
|
@ -412,21 +408,11 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
} else {
|
||||
document.add(new StoredField(SourceFieldMapper.NAME, ref.bytes, ref.offset, ref.length));
|
||||
}
|
||||
return new ParsedDocument(
|
||||
versionField,
|
||||
seqID,
|
||||
id,
|
||||
"test",
|
||||
routing,
|
||||
Arrays.asList(document),
|
||||
source,
|
||||
XContentType.JSON,
|
||||
mappingUpdate
|
||||
);
|
||||
return new ParsedDocument(versionField, seqID, id, routing, Arrays.asList(document), source, XContentType.JSON, mappingUpdate);
|
||||
}
|
||||
|
||||
public static CheckedBiFunction<String, Integer, ParsedDocument, IOException> nestedParsedDocFactory() throws Exception {
|
||||
final MapperService mapperService = createMapperService("type");
|
||||
final MapperService mapperService = createMapperService();
|
||||
final String nestedMapping = Strings.toString(
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -450,7 +436,7 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
source.endObject();
|
||||
}
|
||||
source.endObject();
|
||||
return nestedMapper.parse(new SourceToParse("test", "type", docId, BytesReference.bytes(source), XContentType.JSON));
|
||||
return nestedMapper.parse(new SourceToParse("test", docId, BytesReference.bytes(source), XContentType.JSON));
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -460,7 +446,7 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
public static EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() {
|
||||
return new EngineConfig.TombstoneDocSupplier() {
|
||||
@Override
|
||||
public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
|
||||
public ParsedDocument newDeleteTombstoneDoc(String id) {
|
||||
final ParseContext.Document doc = new ParseContext.Document();
|
||||
Field uidField = new Field(IdFieldMapper.NAME, Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE);
|
||||
doc.add(uidField);
|
||||
|
@ -476,7 +462,6 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
versionField,
|
||||
seqID,
|
||||
id,
|
||||
type,
|
||||
null,
|
||||
Collections.singletonList(doc),
|
||||
new BytesArray("{}"),
|
||||
|
@ -498,17 +483,7 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
doc.add(versionField);
|
||||
BytesRef byteRef = new BytesRef(reason);
|
||||
doc.add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length));
|
||||
return new ParsedDocument(
|
||||
versionField,
|
||||
seqID,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
Collections.singletonList(doc),
|
||||
null,
|
||||
XContentType.JSON,
|
||||
null
|
||||
);
|
||||
return new ParsedDocument(versionField, seqID, null, null, Collections.singletonList(doc), null, XContentType.JSON, null);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -991,7 +966,7 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) {
|
||||
return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
|
||||
return new Engine.Delete(id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
|
||||
}
|
||||
|
||||
protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException {
|
||||
|
@ -1056,7 +1031,6 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
);
|
||||
} else {
|
||||
op = new Engine.Delete(
|
||||
"test",
|
||||
docId,
|
||||
id,
|
||||
forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
|
||||
|
@ -1115,7 +1089,6 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
case DELETE:
|
||||
operations.add(
|
||||
new Engine.Delete(
|
||||
doc.type(),
|
||||
doc.id(),
|
||||
EngineTestCase.newUid(doc),
|
||||
seqNo,
|
||||
|
@ -1478,7 +1451,7 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public static MapperService createMapperService(String type) throws IOException {
|
||||
public static MapperService createMapperService() throws IOException {
|
||||
IndexMetadata indexMetadata = IndexMetadata.builder("test")
|
||||
.settings(
|
||||
Settings.builder()
|
||||
|
@ -1486,7 +1459,7 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
|
|||
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
|
||||
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
|
||||
)
|
||||
.putMapping(type, "{\"properties\": {}}")
|
||||
.putMapping("{\"properties\": {}}")
|
||||
.build();
|
||||
MapperService mapperService = MapperTestUtils.newMapperService(
|
||||
new NamedXContentRegistry(ClusterModule.getNamedXWriteables()),
|
||||
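EngineTestCase above collapses its ParsedDocument construction onto the typeless constructor. A sketch of that call with the fixture objects passed in as parameters, so nothing outside the hunks is invented; illustrative only.

import java.util.Arrays;

import org.apache.lucene.document.NumericDocValuesField;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.mapper.Mapping;
import org.opensearch.index.mapper.ParseContext;
import org.opensearch.index.mapper.ParsedDocument;
import org.opensearch.index.mapper.SeqNoFieldMapper;

// Hypothetical wrapper around the constructor used in the updated EngineTestCase:
// (version, seqID, id, routing, documents, source, xContentType, dynamicMappingsUpdate).
final class ParsedDocumentSketch {
    static ParsedDocument build(NumericDocValuesField versionField,
                                SeqNoFieldMapper.SequenceIDFields seqID,
                                ParseContext.Document document,
                                BytesReference source,
                                Mapping mappingUpdate) {
        return new ParsedDocument(versionField, seqID, "1", null, Arrays.asList(document), source, XContentType.JSON, mappingUpdate);
    }
}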
|
|
|
@ -44,7 +44,6 @@ import org.opensearch.index.analysis.NamedAnalyzer;
|
|||
import org.opensearch.index.mapper.DocumentMapper;
|
||||
import org.opensearch.index.mapper.DocumentMapperForType;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.index.mapper.Mapping;
|
||||
import org.opensearch.index.mapper.RootObjectMapper;
|
||||
import org.opensearch.index.mapper.SourceToParse;
|
||||
import org.opensearch.index.seqno.SequenceNumbers;
|
||||
|
@ -65,8 +64,6 @@ import static java.util.Collections.emptyMap;
|
|||
public class TranslogHandler implements Engine.TranslogRecoveryRunner {
|
||||
|
||||
private final MapperService mapperService;
|
||||
public Mapping mappingUpdate = null;
|
||||
private final Map<String, Mapping> recoveredTypes = new HashMap<>();
|
||||
|
||||
private final AtomicLong appliedOperations = new AtomicLong();
|
||||
|
||||
|
@ -95,21 +92,13 @@ public class TranslogHandler implements Engine.TranslogRecoveryRunner {
|
|||
private DocumentMapperForType docMapper(String type) {
|
||||
RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder(type);
|
||||
DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService);
|
||||
return new DocumentMapperForType(b.build(mapperService), mappingUpdate);
|
||||
return new DocumentMapperForType(b.build(mapperService), null);
|
||||
}
|
||||
|
||||
private void applyOperation(Engine engine, Engine.Operation operation) throws IOException {
|
||||
switch (operation.operationType()) {
|
||||
case INDEX:
|
||||
Engine.Index engineIndex = (Engine.Index) operation;
|
||||
Mapping update = engineIndex.parsedDoc().dynamicMappingsUpdate();
|
||||
if (engineIndex.parsedDoc().dynamicMappingsUpdate() != null) {
|
||||
recoveredTypes.compute(
|
||||
engineIndex.type(),
|
||||
(k, mapping) -> mapping == null ? update : mapping.merge(update, MapperService.MergeReason.MAPPING_RECOVERY)
|
||||
);
|
||||
}
|
||||
engine.index(engineIndex);
|
||||
engine.index((Engine.Index) operation);
|
||||
break;
|
||||
case DELETE:
|
||||
engine.delete((Engine.Delete) operation);
|
||||
|
@ -122,13 +111,6 @@ public class TranslogHandler implements Engine.TranslogRecoveryRunner {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the recovered types modifying the mapping during the recovery
|
||||
*/
|
||||
public Map<String, Mapping> getRecoveredTypes() {
|
||||
return recoveredTypes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int run(Engine engine, Translog.Snapshot snapshot) throws IOException {
|
||||
int opsRecovered = 0;
|
||||
|
@ -150,15 +132,8 @@ public class TranslogHandler implements Engine.TranslogRecoveryRunner {
|
|||
final Translog.Index index = (Translog.Index) operation;
|
||||
final String indexName = mapperService.index().getName();
|
||||
final Engine.Index engineIndex = IndexShard.prepareIndex(
|
||||
docMapper(index.type()),
|
||||
new SourceToParse(
|
||||
indexName,
|
||||
index.type(),
|
||||
index.id(),
|
||||
index.source(),
|
||||
XContentHelper.xContentType(index.source()),
|
||||
index.routing()
|
||||
),
|
||||
docMapper(MapperService.SINGLE_MAPPING_NAME),
|
||||
new SourceToParse(indexName, index.id(), index.source(), XContentHelper.xContentType(index.source()), index.routing()),
|
||||
index.seqNo(),
|
||||
index.primaryTerm(),
|
||||
index.version(),
|
||||
|
@ -173,7 +148,6 @@ public class TranslogHandler implements Engine.TranslogRecoveryRunner {
|
|||
case DELETE:
|
||||
final Translog.Delete delete = (Translog.Delete) operation;
|
||||
final Engine.Delete engineDelete = new Engine.Delete(
|
||||
delete.type(),
|
||||
delete.id(),
|
||||
delete.uid(),
|
||||
delete.seqNo(),
|
||||
|
|
|
@ -183,11 +183,11 @@ public abstract class MapperServiceTestCase extends OpenSearchTestCase {
|
|||
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
|
||||
build.accept(builder);
|
||||
builder.endObject();
|
||||
return new SourceToParse("test", "_doc", "1", BytesReference.bytes(builder), XContentType.JSON);
|
||||
return new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON);
|
||||
}
|
||||
|
||||
protected final SourceToParse source(String source) {
|
||||
return new SourceToParse("test", "_doc", "1", new BytesArray(source), XContentType.JSON);
|
||||
return new SourceToParse("test", "1", new BytesArray(source), XContentType.JSON);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -85,6 +85,7 @@ import org.opensearch.index.engine.DocIdSeqNoAndSource;
|
|||
import org.opensearch.index.engine.EngineConfigFactory;
|
||||
import org.opensearch.index.engine.EngineFactory;
|
||||
import org.opensearch.index.engine.InternalEngineFactory;
|
||||
import org.opensearch.index.mapper.MapperService;
|
||||
import org.opensearch.index.seqno.GlobalCheckpointSyncAction;
|
||||
import org.opensearch.index.seqno.RetentionLease;
|
||||
import org.opensearch.index.seqno.RetentionLeaseSyncAction;
|
||||
|
@ -108,7 +109,6 @@ import java.util.Collections;
|
|||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
import java.util.concurrent.Future;
|
||||
|
@ -128,7 +128,7 @@ public abstract class OpenSearchIndexLevelReplicationTestCase extends IndexShard
|
|||
|
||||
protected final Index index = new Index("test", "uuid");
|
||||
private final ShardId shardId = new ShardId(index, 0);
|
||||
protected final Map<String, String> indexMapping = Collections.singletonMap("type", "{ \"type\": {} }");
|
||||
protected final String indexMapping = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": {} }";
|
||||
|
||||
protected ReplicationGroup createGroup(int replicas) throws IOException {
|
||||
return createGroup(replicas, Settings.EMPTY);
|
||||
|
@ -143,11 +143,11 @@ public abstract class OpenSearchIndexLevelReplicationTestCase extends IndexShard
|
|||
return buildIndexMetadata(replicas, indexMapping);
|
||||
}
|
||||
|
||||
protected IndexMetadata buildIndexMetadata(int replicas, Map<String, String> mappings) throws IOException {
|
||||
protected IndexMetadata buildIndexMetadata(int replicas, String mappings) throws IOException {
|
||||
return buildIndexMetadata(replicas, Settings.EMPTY, mappings);
|
||||
}
|
||||
|
||||
protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, Map<String, String> mappings) throws IOException {
|
||||
protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, String mappings) throws IOException {
|
||||
Settings settings = Settings.builder()
|
||||
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, replicas)
|
||||
|
@ -155,10 +155,11 @@ public abstract class OpenSearchIndexLevelReplicationTestCase extends IndexShard
|
|||
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000))
|
||||
.put(indexSettings)
|
||||
.build();
|
||||
IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName()).settings(settings).primaryTerm(0, randomIntBetween(1, 100));
|
||||
for (Map.Entry<String, String> typeMapping : mappings.entrySet()) {
|
||||
metadata.putMapping(typeMapping.getKey(), typeMapping.getValue());
|
||||
}
|
||||
IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName())
|
||||
.settings(settings)
|
||||
.putMapping(mappings)
|
||||
.primaryTerm(0, randomIntBetween(1, 100));
|
||||
|
||||
return metadata.build();
|
||||
}
|
||||
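The replication test base above now carries its mapping as a single JSON string and feeds it to putMapping(String). A sketch of metadata built that way; the settings keys mirror the ones visible in the hunks, and the snippet is an assumption-laden illustration rather than commit code.

import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;

// Sketch: a typeless mapping goes straight into putMapping(String); there is no
// per-type map of mappings any more.
final class IndexMetadataSketch {
    static IndexMetadata build() {
        Settings settings = Settings.builder()
            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
            .build();
        return IndexMetadata.builder("test")
            .settings(settings)
            .putMapping("{\"properties\": {}}")
            .build();
    }
}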
|
||||
|
|
|
@ -279,7 +279,7 @@ public abstract class IndexShardTestCase extends OpenSearchTestCase {
|
|||
IndexMetadata.Builder metadata = IndexMetadata.builder(shardRouting.getIndexName())
|
||||
.settings(indexSettings)
|
||||
.primaryTerm(0, primaryTerm)
|
||||
.putMapping("_doc", "{ \"properties\": {} }");
|
||||
.putMapping("{ \"properties\": {} }");
|
||||
return newShard(shardRouting, metadata.build(), null, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners);
|
||||
}
|
||||
|
||||
|
@ -877,25 +877,12 @@ public abstract class IndexShardTestCase extends OpenSearchTestCase {
|
|||
}
|
||||
|
||||
protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, String source) throws IOException {
|
||||
return indexDoc(shard, type, id, source, XContentType.JSON, null);
|
||||
return indexDoc(shard, id, source, XContentType.JSON, null);
|
||||
}
|
||||
|
||||
protected Engine.IndexResult indexDoc(
|
||||
IndexShard shard,
|
||||
String type,
|
||||
String id,
|
||||
String source,
|
||||
XContentType xContentType,
|
||||
String routing
|
||||
) throws IOException {
|
||||
SourceToParse sourceToParse = new SourceToParse(
|
||||
shard.shardId().getIndexName(),
|
||||
type,
|
||||
id,
|
||||
new BytesArray(source),
|
||||
xContentType,
|
||||
routing
|
||||
);
|
||||
protected Engine.IndexResult indexDoc(IndexShard shard, String id, String source, XContentType xContentType, String routing)
|
||||
throws IOException {
|
||||
SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), id, new BytesArray(source), xContentType, routing);
|
||||
Engine.IndexResult result;
|
||||
if (shard.routingEntry().primary()) {
|
||||
result = shard.applyIndexOperationOnPrimary(
|
||||
|
@ -911,7 +898,7 @@ public abstract class IndexShardTestCase extends OpenSearchTestCase {
|
|||
updateMappings(
|
||||
shard,
|
||||
IndexMetadata.builder(shard.indexSettings().getIndexMetadata())
|
||||
.putMapping(type, result.getRequiredMappingUpdate().toString())
|
||||
.putMapping(result.getRequiredMappingUpdate().toString())
|
||||
.build()
|
||||
);
|
||||
result = shard.applyIndexOperationOnPrimary(
|
||||
|
@ -956,12 +943,11 @@ public abstract class IndexShardTestCase extends OpenSearchTestCase {
|
|||
);
|
||||
}
|
||||
|
||||
protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id) throws IOException {
|
||||
protected Engine.DeleteResult deleteDoc(IndexShard shard, String id) throws IOException {
|
||||
final Engine.DeleteResult result;
|
||||
if (shard.routingEntry().primary()) {
|
||||
result = shard.applyDeleteOperationOnPrimary(
|
||||
Versions.MATCH_ANY,
|
||||
type,
|
||||
id,
|
||||
VersionType.INTERNAL,
|
||||
SequenceNumbers.UNASSIGNED_SEQ_NO,
|
||||
|
@ -972,7 +958,7 @@ public abstract class IndexShardTestCase extends OpenSearchTestCase {
|
|||
} else {
|
||||
final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1;
|
||||
shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates
|
||||
result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, type, id);
|
||||
result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, id);
|
||||
shard.sync(); // advance local checkpoint
|
||||
}
|
||||
return result;
|
||||
|
|
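With the IndexShardTestCase helpers above reduced to id-keyed signatures, a test built on this framework indexes and deletes documents without naming a type. A short hedged sketch; closeShards is assumed to be the framework's usual cleanup helper and is not shown in the hunks.

import java.io.IOException;

import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.IndexShardTestCase;

// Illustrative test, not part of the commit: both helpers are the updated,
// typeless variants defined in the hunks above.
public class NoTypeIndexingSketchTests extends IndexShardTestCase {
    public void testIndexAndDeleteById() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "1", "{\"foo\" : \"bar\"}", XContentType.JSON, null); // id, source, xContentType, routing
        deleteDoc(shard, "1");                                                // id only
        closeShards(shard);                                                   // assumed framework cleanup helper
    }
}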