Store the reason of noop in its document tombstone (#30570)

Relates #29530
Nhat Nguyen 2018-05-15 13:36:54 -04:00 committed by GitHub
parent b971a81e70
commit 2a2c23be2f
8 changed files with 26 additions and 15 deletions
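In short: noop tombstones previously discarded the operation's reason, so a history snapshot rebuilt from Lucene could only return Translog.NoOp entries with an empty reason string (note the TODO removed in LuceneChangesSnapshot below). This commit threads the reason through EngineConfig.TombstoneDocSupplier#newNoopTombstoneDoc(String) and stores it on the tombstone document, reusing the _source stored field as the carrier, so the snapshot can hand the original reason back verbatim.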

server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java

@@ -380,8 +380,9 @@ public final class EngineConfig {
         /**
          * Creates a tombstone document for a noop operation.
+         * @param reason the reason of the noop
          */
-        ParsedDocument newNoopTombstoneDoc();
+        ParsedDocument newNoopTombstoneDoc(String reason);
     }

     public TombstoneDocSupplier getTombstoneDocSupplier() {

server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java

@@ -1349,7 +1349,7 @@ public class InternalEngine extends Engine {
         Exception failure = null;
         if (softDeleteEnabled) {
             try {
-                final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newNoopTombstoneDoc();
+                final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newNoopTombstoneDoc(noOp.reason());
                 tombstone.updateSeqID(noOp.seqNo(), noOp.primaryTerm());
                 // A noop tombstone does not require a _version, but one is added to keep docvalues for the version field fully dense.
                 // 1L is chosen to optimize compression, as it is likely the most common value in the version field.

server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java

@@ -186,7 +186,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot {
             final Translog.Operation op;
             final boolean isTombstone = docValues[leaf.ord].isTombstone(segmentDocID);
             if (isTombstone && fields.uid() == null) {
-                op = new Translog.NoOp(seqNo, primaryTerm, ""); // TODO: store reason in ignored fields?
+                op = new Translog.NoOp(seqNo, primaryTerm, fields.source().utf8ToString());
                 assert version == 1L : "Noop tombstone should have version 1L; actual version [" + version + "]";
                 assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]";
             } else {
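A note on this read path: the fields.uid() == null guard is what identifies a noop tombstone here, since noop tombstones are indexed without an _id (see the "_id won't be used" comment in DocumentMapper below, and the field-set assertions in IndexShardTests). For such documents the _source stored field now holds the raw UTF-8 reason rather than JSON, which is why fields.source().utf8ToString() recovers it directly and the old TODO about a separate ignored field becomes unnecessary.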

server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java

@@ -19,10 +19,12 @@
 package org.elasticsearch.index.mapper;

+import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -262,10 +264,14 @@ public class DocumentMapper implements ToXContentFragment {
         return documentParser.parseDocument(emptySource, deleteTombstoneMetadataFieldMappers).toTombstone();
     }

-    public ParsedDocument createNoopTombstoneDoc(String index) throws MapperParsingException {
+    public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException {
         final String id = ""; // _id won't be used.
-        final SourceToParse emptySource = SourceToParse.source(index, type, id, new BytesArray("{}"), XContentType.JSON);
-        return documentParser.parseDocument(emptySource, noopTombstoneMetadataFieldMappers).toTombstone();
+        final SourceToParse sourceToParse = SourceToParse.source(index, type, id, new BytesArray("{}"), XContentType.JSON);
+        final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers).toTombstone();
+        // Store the reason of a noop as a raw string in the _source field
+        final BytesRef byteRef = new BytesRef(reason);
+        parsedDoc.rootDoc().add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length));
+        return parsedDoc;
     }

     /**
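The trick above is that for a noop tombstone the _source field, which normally holds the JSON document source, is repurposed to carry an arbitrary UTF-8 string. The following is a minimal, self-contained Lucene sketch of that store-and-read round trip; it is not Elasticsearch code: the class name is made up, the "_source" literal stands in for SourceFieldMapper.NAME, and the in-memory RAMDirectory matches the Lucene 7-era API this commit was written against.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;

public class NoopReasonRoundTrip {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new RAMDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
            // Write side: store the reason as raw UTF-8 bytes under "_source",
            // mirroring what createNoopTombstoneDoc does above.
            String reason = "filling gaps";
            BytesRef bytes = new BytesRef(reason);
            Document doc = new Document();
            doc.add(new StoredField("_source", bytes.bytes, bytes.offset, bytes.length));
            writer.addDocument(doc);
            writer.commit();

            // Read side: the stored bytes convert straight back to the reason,
            // mirroring fields.source().utf8ToString() in LuceneChangesSnapshot.
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                BytesRef stored = reader.document(0).getBinaryValue("_source");
                System.out.println(stored.utf8ToString()); // prints: filling gaps
            }
        }
    }
}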

server/src/main/java/org/elasticsearch/index/shard/IndexShard.java

@@ -2614,8 +2614,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesClusterStateService.Shard {
                 return docMapper(type).getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id);
             }

             @Override
-            public ParsedDocument newNoopTombstoneDoc() {
-                return noopDocumentMapper.createNoopTombstoneDoc(shardId.getIndexName());
+            public ParsedDocument newNoopTombstoneDoc(String reason) {
+                return noopDocumentMapper.createNoopTombstoneDoc(shardId.getIndexName(), reason);
             }
         };
     }

server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java

@@ -3711,7 +3711,7 @@ public class InternalEngineTests extends EngineTestCase {
             };
             noOpEngine.recoverFromTranslog();
             final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get());
-            final String reason = randomAlphaOfLength(16);
+            final String reason = "filling gaps";
             noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason));
             assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 1)));
             assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled));
@@ -3737,7 +3737,7 @@ public class InternalEngineTests extends EngineTestCase {
             List<Translog.Operation> operationsFromLucene = readAllOperationsInLucene(noOpEngine, mapperService);
             assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // fills n gaps and 2 manual noops.
             for (int i = 0; i < operationsFromLucene.size(); i++) {
-                assertThat(operationsFromLucene.get(i), equalTo(new Translog.NoOp(localCheckpoint + 1 + i, primaryTerm.get(), "")));
+                assertThat(operationsFromLucene.get(i), equalTo(new Translog.NoOp(localCheckpoint + 1 + i, primaryTerm.get(), "filling gaps")));
             }
             assertConsistentHistoryBetweenTranslogAndLuceneIndex(noOpEngine, mapperService);
         } finally {

server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java

@@ -29,6 +29,7 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.IOContext;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
@@ -77,7 +78,6 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.EngineException;
-import org.elasticsearch.index.engine.EngineTestCase;
 import org.elasticsearch.index.engine.InternalEngine;
 import org.elasticsearch.index.engine.InternalEngineFactory;
 import org.elasticsearch.index.engine.Segment;
@@ -88,10 +88,10 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.Mapping;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SeqNoFieldMapper;
+import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.VersionFieldMapper;
@@ -3109,13 +3109,15 @@ public class IndexShardTests extends IndexShardTestCase {
         assertThat(deleteDoc.getField(IdFieldMapper.NAME).binaryValue(), equalTo(Uid.encodeId(id)));
         assertThat(deleteDoc.getField(SeqNoFieldMapper.TOMBSTONE_NAME).numericValue().longValue(), equalTo(1L));

-        ParsedDocument noopTombstone = shard.getEngine().config().getTombstoneDocSupplier().newNoopTombstoneDoc();
+        final String reason = randomUnicodeOfLength(200);
+        ParsedDocument noopTombstone = shard.getEngine().config().getTombstoneDocSupplier().newNoopTombstoneDoc(reason);
         assertThat(noopTombstone.docs(), hasSize(1));
         ParseContext.Document noopDoc = noopTombstone.docs().get(0);
         assertThat(noopDoc.getFields().stream().map(IndexableField::name).collect(Collectors.toList()),
-            containsInAnyOrder(VersionFieldMapper.NAME, SeqNoFieldMapper.TOMBSTONE_NAME,
+            containsInAnyOrder(VersionFieldMapper.NAME, SourceFieldMapper.NAME, SeqNoFieldMapper.TOMBSTONE_NAME,
                 SeqNoFieldMapper.NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.PRIMARY_TERM_NAME));
         assertThat(noopDoc.getField(SeqNoFieldMapper.TOMBSTONE_NAME).numericValue().longValue(), equalTo(1L));
+        assertThat(noopDoc.getField(SourceFieldMapper.NAME).binaryValue(), equalTo(new BytesRef(reason)));

         closeShards(shard);
     }
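One detail worth noting in the assertion above: new BytesRef(reason) encodes the string as UTF-8, which is why a reason produced by randomUnicodeOfLength(200) is expected to survive the store/read cycle byte-for-byte. A tiny standalone check of that property (class name and sample string are made up for illustration):

import org.apache.lucene.util.BytesRef;

public class BytesRefUtf8Check {
    public static void main(String[] args) {
        // BytesRef(CharSequence) UTF-8 encodes its input; utf8ToString() decodes it.
        String reason = "rollback \u00e9\u4f8b\u2713"; // arbitrary unicode sample
        BytesRef encoded = new BytesRef(reason);
        if (!reason.equals(encoded.utf8ToString())) {
            throw new AssertionError("round trip failed");
        }
        System.out.println("round trip ok: " + encoded.utf8ToString());
    }
}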

test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java

@@ -306,7 +306,7 @@ public abstract class EngineTestCase extends ESTestCase {
             }

             @Override
-            public ParsedDocument newNoopTombstoneDoc() {
+            public ParsedDocument newNoopTombstoneDoc(String reason) {
                 final ParseContext.Document doc = new ParseContext.Document();
                 SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
                 doc.add(seqID.seqNo);
@@ -316,6 +316,8 @@
                 doc.add(seqID.tombstoneField);
                 Field versionField = new NumericDocValuesField(VersionFieldMapper.NAME, 0);
                 doc.add(versionField);
+                BytesRef byteRef = new BytesRef(reason);
+                doc.add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length));
                 return new ParsedDocument(versionField, seqID, null, null, null,
                     Collections.singletonList(doc), null, XContentType.JSON, null);
             }