LUCENE-8335: Enforce soft-deletes field up-front.

The soft-deletes field must be marked as such once it's introduced
and can't be changed after the fact.
Simon Willnauer committed 2018-06-02 12:30:02 +02:00
parent 1ff24bbb28
commit a2d9276674
20 changed files with 187 additions and 38 deletions
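
Below is a minimal usage sketch of the behavior this change enforces. It is not part of the commit; it relies on the IndexWriterConfig#setSoftDeletesField and IndexWriter#softUpdateDocument APIs exercised in the tests further down, and uses StandardAnalyzer, RAMDirectory and the field name "my_deletes" purely as example choices. Once an index has used a field for soft deletes, reopening it with a different soft-deletes field now fails up-front:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class SoftDeletesFieldDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();

    // Open a writer with "my_deletes" configured as the soft-deletes field.
    IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer())
        .setSoftDeletesField("my_deletes");
    try (IndexWriter writer = new IndexWriter(dir, config)) {
      Document doc = new Document();
      doc.add(new StringField("id", "1", Field.Store.YES));
      writer.addDocument(doc);

      // A soft update marks the previous version as soft-deleted via a doc-values field.
      Document update = new Document();
      update.add(new StringField("id", "1", Field.Store.YES));
      writer.softUpdateDocument(new Term("id", "1"), update,
          new NumericDocValuesField("my_deletes", 1));
      writer.commit();
    }

    // Reopening the same index with a different soft-deletes field is rejected up-front.
    try (IndexWriter writer = new IndexWriter(dir,
        new IndexWriterConfig(new StandardAnalyzer()).setSoftDeletesField("your_deletes"))) {
      // never reached
    } catch (IllegalArgumentException e) {
      // "cannot configure [your_deletes] as soft-deletes;
      //  this index uses [my_deletes] as soft-deletes already"
      System.out.println(e.getMessage());
    }
    dir.close();
  }
}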


@@ -203,6 +203,9 @@ New Features
   now use to also take pending deletes into account which ensures that all file
   generations per segment always go forward. (Simon Willnauer)
 
+* LUCENE-8335: Enforce soft-deletes field up-front. Soft deletes field must be marked
+  as such once it's introduced and can't be changed after the fact. (Nhat Nguyen via Simon Willnauer)
+
 Bug Fixes
 
 * LUCENE-8221: MoreLikeThis.setMaxDocFreqPct can easily int-overflow on larger


@@ -66,6 +66,7 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
   static final BytesRef ATT_VALUE = new BytesRef(" value ");
   static final BytesRef DIM_COUNT = new BytesRef(" dimensional count ");
   static final BytesRef DIM_NUM_BYTES = new BytesRef(" dimensional num bytes ");
+  static final BytesRef SOFT_DELETES = new BytesRef(" soft-deletes ");
 
   @Override
   public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, IOContext iocontext) throws IOException {
@@ -140,9 +141,13 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
       assert StringHelper.startsWith(scratch.get(), DIM_NUM_BYTES);
       int dimensionalNumBytes = Integer.parseInt(readString(DIM_NUM_BYTES.length, scratch));
 
+      SimpleTextUtil.readLine(input, scratch);
+      assert StringHelper.startsWith(scratch.get(), SOFT_DELETES);
+      boolean isSoftDeletesField = Boolean.parseBoolean(readString(SOFT_DELETES.length, scratch));
+
       infos[i] = new FieldInfo(name, fieldNumber, storeTermVector,
           omitNorms, storePayloads, indexOptions, docValuesType, dvGen, Collections.unmodifiableMap(atts),
-          dimensionalCount, dimensionalNumBytes);
+          dimensionalCount, dimensionalNumBytes, isSoftDeletesField);
     }
 
     SimpleTextUtil.checkFooter(input);
@@ -238,6 +243,10 @@ public class SimpleTextFieldInfosFormat extends FieldInfosFormat {
       SimpleTextUtil.write(out, DIM_NUM_BYTES);
       SimpleTextUtil.write(out, Integer.toString(fi.getPointNumBytes()), scratch);
       SimpleTextUtil.writeNewline(out);
+
+      SimpleTextUtil.write(out, SOFT_DELETES);
+      SimpleTextUtil.write(out, Boolean.toString(fi.isSoftDeletesField()), scratch);
+      SimpleTextUtil.writeNewline(out);
     }
     SimpleTextUtil.writeChecksum(out, scratch);
     success = true;


@@ -148,7 +148,7 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat {
         lastAttributes = attributes;
         try {
           infos[i] = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads,
-                                   indexOptions, docValuesType, dvGen, attributes, 0, 0);
+                                   indexOptions, docValuesType, dvGen, attributes, 0, 0, false);
           infos[i].checkConsistency();
         } catch (IllegalStateException e) {
           throw new CorruptIndexException("invalid fieldinfo for field: " + name + ", fieldNumber=" + fieldNumber, input, e);


@@ -136,6 +136,7 @@ public final class Lucene60FieldInfosFormat extends FieldInfosFormat {
         boolean storeTermVector = (bits & STORE_TERMVECTOR) != 0;
         boolean omitNorms = (bits & OMIT_NORMS) != 0;
         boolean storePayloads = (bits & STORE_PAYLOADS) != 0;
+        boolean isSoftDeletesField = (bits & SOFT_DELETES_FIELD) != 0;
 
         final IndexOptions indexOptions = getIndexOptions(input, input.readByte());
@@ -159,7 +160,7 @@ public final class Lucene60FieldInfosFormat extends FieldInfosFormat {
         try {
           infos[i] = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads,
                                    indexOptions, docValuesType, dvGen, attributes,
-                                   pointDimensionCount, pointNumBytes);
+                                   pointDimensionCount, pointNumBytes, isSoftDeletesField);
           infos[i].checkConsistency();
         } catch (IllegalStateException e) {
           throw new CorruptIndexException("invalid fieldinfo for field: " + name + ", fieldNumber=" + fieldNumber, input, e);
@@ -277,6 +278,7 @@ public final class Lucene60FieldInfosFormat extends FieldInfosFormat {
         if (fi.hasVectors()) bits |= STORE_TERMVECTOR;
         if (fi.omitsNorms()) bits |= OMIT_NORMS;
         if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
+        if (fi.isSoftDeletesField()) bits |= SOFT_DELETES_FIELD;
         output.writeByte(bits);
 
         output.writeByte(indexOptionsByte(fi.getIndexOptions()));
@@ -301,10 +303,12 @@ public final class Lucene60FieldInfosFormat extends FieldInfosFormat {
   // Codec header
   static final String CODEC_NAME = "Lucene60FieldInfos";
   static final int FORMAT_START = 0;
-  static final int FORMAT_CURRENT = FORMAT_START;
+  static final int FORMAT_SOFT_DELETES = 1;
+  static final int FORMAT_CURRENT = FORMAT_SOFT_DELETES;
 
   // Field flags
   static final byte STORE_TERMVECTOR = 0x1;
   static final byte OMIT_NORMS = 0x2;
   static final byte STORE_PAYLOADS = 0x4;
+  static final byte SOFT_DELETES_FIELD = 0x8;
 }


@@ -53,14 +53,17 @@ public final class FieldInfo {
   private int pointDimensionCount;
   private int pointNumBytes;
 
+  // whether this field is used as the soft-deletes field
+  private final boolean softDeletesField;
+
   /**
    * Sole constructor.
    *
    * @lucene.experimental
    */
-  public FieldInfo(String name, int number, boolean storeTermVector, boolean omitNorms,
-                   boolean storePayloads, IndexOptions indexOptions, DocValuesType docValues,
-                   long dvGen, Map<String,String> attributes, int pointDimensionCount, int pointNumBytes) {
+  public FieldInfo(String name, int number, boolean storeTermVector, boolean omitNorms, boolean storePayloads,
+                   IndexOptions indexOptions, DocValuesType docValues, long dvGen, Map<String,String> attributes,
+                   int pointDimensionCount, int pointNumBytes, boolean softDeletesField) {
     this.name = Objects.requireNonNull(name);
     this.number = number;
     this.docValuesType = Objects.requireNonNull(docValues, "DocValuesType must not be null (field: \"" + name + "\")");
@@ -78,6 +81,7 @@ public final class FieldInfo {
     this.attributes = Objects.requireNonNull(attributes);
     this.pointDimensionCount = pointDimensionCount;
     this.pointNumBytes = pointNumBytes;
+    this.softDeletesField = softDeletesField;
     assert checkConsistency();
   }
@@ -332,4 +336,12 @@ public final class FieldInfo {
   public Map<String,String> attributes() {
     return attributes;
   }
+
+  /**
+   * Returns true if this field is configured and used as the soft-deletes field.
+   * See {@link IndexWriterConfig#softDeletesField}
+   */
+  public boolean isSoftDeletesField() {
+    return softDeletesField;
+  }
 }


@@ -221,13 +221,17 @@ public class FieldInfos implements Iterable<FieldInfo> {
     // norms back on after they were already ommitted; today
     // we silently discard the norm but this is badly trappy
     private int lowestUnassignedFieldNumber = -1;
 
-    FieldNumbers() {
+    // The soft-deletes field from IWC to enforce a single soft-deletes field
+    private final String softDeletesFieldName;
+
+    FieldNumbers(String softDeletesFieldName) {
      this.nameToNumber = new HashMap<>();
      this.numberToName = new HashMap<>();
      this.indexOptions = new HashMap<>();
      this.docValuesType = new HashMap<>();
      this.dimensions = new HashMap<>();
+      this.softDeletesFieldName = softDeletesFieldName;
     }
 
     /**
@@ -236,7 +240,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
      * number assigned if possible otherwise the first unassigned field number
      * is used as the field number.
      */
-    synchronized int addOrGet(String fieldName, int preferredFieldNumber, IndexOptions indexOptions, DocValuesType dvType, int dimensionCount, int dimensionNumBytes) {
+    synchronized int addOrGet(String fieldName, int preferredFieldNumber, IndexOptions indexOptions, DocValuesType dvType, int dimensionCount, int dimensionNumBytes, boolean isSoftDeletesField) {
       if (indexOptions != IndexOptions.NONE) {
         IndexOptions currentOpts = this.indexOptions.get(fieldName);
         if (currentOpts == null) {
@@ -284,6 +288,16 @@ public class FieldInfos implements Iterable<FieldInfo> {
         nameToNumber.put(fieldName, fieldNumber);
       }
 
+      if (isSoftDeletesField) {
+        if (softDeletesFieldName == null) {
+          throw new IllegalArgumentException("this index has [" + fieldName + "] as soft-deletes already but soft-deletes field is not configured in IWC");
+        } else if (fieldName.equals(softDeletesFieldName) == false) {
+          throw new IllegalArgumentException("cannot configure [" + softDeletesFieldName + "] as soft-deletes; this index uses [" + fieldName + "] as soft-deletes already");
+        }
+      } else if (fieldName.equals(softDeletesFieldName)) {
+        throw new IllegalArgumentException("cannot configure [" + softDeletesFieldName + "] as soft-deletes; this index uses [" + fieldName + "] as non-soft-deletes already");
+      }
+
       return fieldNumber.intValue();
     }
@@ -385,7 +399,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
     private boolean finished;
 
     Builder() {
-      this(new FieldNumbers());
+      this(new FieldNumbers(null));
     }
 
     /**
@@ -413,8 +427,9 @@ public class FieldInfos implements Iterable<FieldInfo> {
         // number for this field. If the field was seen
         // before then we'll get the same name and number,
         // else we'll allocate a new one:
-        final int fieldNumber = globalFieldNumbers.addOrGet(name, -1, IndexOptions.NONE, DocValuesType.NONE, 0, 0);
-        fi = new FieldInfo(name, fieldNumber, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, new HashMap<>(), 0, 0);
+        final boolean isSoftDeletesField = name.equals(globalFieldNumbers.softDeletesFieldName);
+        final int fieldNumber = globalFieldNumbers.addOrGet(name, -1, IndexOptions.NONE, DocValuesType.NONE, 0, 0, isSoftDeletesField);
+        fi = new FieldInfo(name, fieldNumber, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, new HashMap<>(), 0, 0, isSoftDeletesField);
         assert !byName.containsKey(fi.name);
         globalFieldNumbers.verifyConsistent(Integer.valueOf(fi.number), fi.name, DocValuesType.NONE);
         byName.put(fi.name, fi);
@@ -427,7 +442,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
                            boolean storeTermVector,
                            boolean omitNorms, boolean storePayloads, IndexOptions indexOptions,
                            DocValuesType docValues, long dvGen,
-                           int dimensionCount, int dimensionNumBytes) {
+                           int dimensionCount, int dimensionNumBytes, boolean isSoftDeletesField) {
       assert assertNotFinished();
       if (docValues == null) {
         throw new NullPointerException("DocValuesType must not be null");
@@ -439,8 +454,8 @@ public class FieldInfos implements Iterable<FieldInfo> {
         // number for this field. If the field was seen
         // before then we'll get the same name and number,
         // else we'll allocate a new one:
-        final int fieldNumber = globalFieldNumbers.addOrGet(name, preferredFieldNumber, indexOptions, docValues, dimensionCount, dimensionNumBytes);
-        fi = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, dvGen, new HashMap<>(), dimensionCount, dimensionNumBytes);
+        final int fieldNumber = globalFieldNumbers.addOrGet(name, preferredFieldNumber, indexOptions, docValues, dimensionCount, dimensionNumBytes, isSoftDeletesField);
+        fi = new FieldInfo(name, fieldNumber, storeTermVector, omitNorms, storePayloads, indexOptions, docValues, dvGen, new HashMap<>(), dimensionCount, dimensionNumBytes, isSoftDeletesField);
        assert !byName.containsKey(fi.name);
        globalFieldNumbers.verifyConsistent(Integer.valueOf(fi.number), fi.name, fi.getDocValuesType());
        byName.put(fi.name, fi);
@@ -473,7 +488,7 @@ public class FieldInfos implements Iterable<FieldInfo> {
       return addOrUpdateInternal(fi.name, fi.number, fi.hasVectors(),
                                  fi.omitsNorms(), fi.hasPayloads(),
                                  fi.getIndexOptions(), fi.getDocValuesType(), dvGen,
-                                 fi.getPointDimensionCount(), fi.getPointNumBytes());
+                                 fi.getPointDimensionCount(), fi.getPointNumBytes(), fi.isSoftDeletesField());
     }
 
     public FieldInfo fieldInfo(String fieldName) {


@@ -960,12 +960,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
   * If this {@link SegmentInfos} has no global field number map the returned instance is empty
   */
  private FieldNumbers getFieldNumberMap() throws IOException {
-    final FieldNumbers map = new FieldNumbers();
+    final FieldNumbers map = new FieldNumbers(config.softDeletesField);
 
    for(SegmentCommitInfo info : segmentInfos) {
      FieldInfos fis = readFieldInfos(info);
      for(FieldInfo fi : fis) {
-        map.addOrGet(fi.name, fi.number, fi.getIndexOptions(), fi.getDocValuesType(), fi.getPointDimensionCount(), fi.getPointNumBytes());
+        map.addOrGet(fi.name, fi.number, fi.getIndexOptions(), fi.getDocValuesType(), fi.getPointDimensionCount(), fi.getPointNumBytes(), fi.isSoftDeletesField());
      }
    }
@@ -1787,7 +1787,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
          if (globalFieldNumberMap.contains(f.name(), dvType) == false) {
            // if this field doesn't exists we try to add it. if it exists and the DV type doesn't match we
            // get a consistent error message as if you try to do that during an indexing operation.
-            globalFieldNumberMap.addOrGet(f.name(), -1, IndexOptions.NONE, dvType, 0, 0);
+            globalFieldNumberMap.addOrGet(f.name(), -1, IndexOptions.NONE, dvType, 0, 0, f.name().equals(config.softDeletesField));
            assert globalFieldNumberMap.contains(f.name(), dvType);
          }
          if (config.getIndexSortFields().contains(f.name())) {
@@ -2824,7 +2824,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
        FieldInfos fis = readFieldInfos(info);
        for(FieldInfo fi : fis) {
          // This will throw exceptions if any of the incoming fields have an illegal schema change:
-          globalFieldNumberMap.addOrGet(fi.name, fi.number, fi.getIndexOptions(), fi.getDocValuesType(), fi.getPointDimensionCount(), fi.getPointNumBytes());
+          globalFieldNumberMap.addOrGet(fi.name, fi.number, fi.getIndexOptions(), fi.getDocValuesType(), fi.getPointDimensionCount(), fi.getPointNumBytes(), fi.isSoftDeletesField());
        }
        infos.add(copySegmentAsIs(info, newSegName, context));
      }


@@ -222,7 +222,7 @@ public class TestDoc extends LuceneTestCase {
      SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(r1, r2),
                                               si, InfoStream.getDefault(), trackingDir,
-                                              new FieldInfos.FieldNumbers(), context);
+                                              new FieldInfos.FieldNumbers(null), context);
 
      MergeState mergeState = merger.merge();
      r1.close();


@@ -3376,4 +3376,108 @@ public class TestIndexWriter extends LuceneTestCase {
     IOUtils.close(reader, writer, dir);
   }
 
+  public void testPreventChangingSoftDeletesField() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig().setSoftDeletesField("my_deletes"));
+    Document v1 = new Document();
+    v1.add(new StringField("id", "1", Field.Store.YES));
+    v1.add(new StringField("version", "1", Field.Store.YES));
+    writer.addDocument(v1);
+    Document v2 = new Document();
+    v2.add(new StringField("id", "1", Field.Store.YES));
+    v2.add(new StringField("version", "2", Field.Store.YES));
+    writer.softUpdateDocument(new Term("id", "1"), v2, new NumericDocValuesField("my_deletes", 1));
+    writer.commit();
+    writer.close();
+    for (SegmentCommitInfo si : SegmentInfos.readLatestCommit(dir)) {
+      FieldInfo softDeleteField = IndexWriter.readFieldInfos(si).fieldInfo("my_deletes");
+      assertTrue(softDeleteField.isSoftDeletesField());
+    }
+
+    IllegalArgumentException illegalError = expectThrows(IllegalArgumentException.class, () -> {
+      new IndexWriter(dir, newIndexWriterConfig().setSoftDeletesField("your_deletes"));
+    });
+    assertEquals("cannot configure [your_deletes] as soft-deletes; " +
+        "this index uses [my_deletes] as soft-deletes already", illegalError.getMessage());
+
+    IndexWriterConfig softDeleteConfig = newIndexWriterConfig().setSoftDeletesField("my_deletes")
+        .setMergePolicy(new SoftDeletesRetentionMergePolicy("my_deletes", () -> new MatchAllDocsQuery(), newMergePolicy()));
+    writer = new IndexWriter(dir, softDeleteConfig);
+    Document tombstone = new Document();
+    tombstone.add(new StringField("id", "tombstone", Field.Store.YES));
+    tombstone.add(new NumericDocValuesField("my_deletes", 1));
+    writer.addDocument(tombstone);
+    writer.flush();
+    for (SegmentCommitInfo si : writer.segmentInfos) {
+      FieldInfo softDeleteField = IndexWriter.readFieldInfos(si).fieldInfo("my_deletes");
+      assertTrue(softDeleteField.isSoftDeletesField());
+    }
+    writer.close();
+
+    // reopen writer without soft-deletes field should be prevented
+    IllegalArgumentException reopenError = expectThrows(IllegalArgumentException.class, () -> {
+      new IndexWriter(dir, newIndexWriterConfig());
+    });
+    assertEquals("this index has [my_deletes] as soft-deletes already" +
+        " but soft-deletes field is not configured in IWC", reopenError.getMessage());
+    dir.close();
+  }
+
+  public void testPreventAddingIndexesWithDifferentSoftDeletesField() throws Exception {
+    Directory dir1 = newDirectory();
+    IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig().setSoftDeletesField("soft_deletes_1"));
+    for (int i = 0; i < 2; i++) {
+      Document d = new Document();
+      d.add(new StringField("id", "1", Field.Store.YES));
+      d.add(new StringField("version", Integer.toString(i), Field.Store.YES));
+      w1.softUpdateDocument(new Term("id", "1"), d, new NumericDocValuesField("soft_deletes_1", 1));
+    }
+    w1.commit();
+    w1.close();
+
+    Directory dir2 = newDirectory();
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig().setSoftDeletesField("soft_deletes_2"));
+    IllegalArgumentException error = expectThrows(IllegalArgumentException.class, () -> w2.addIndexes(dir1));
+    assertEquals("cannot configure [soft_deletes_2] as soft-deletes; this index uses [soft_deletes_1] as soft-deletes already",
+        error.getMessage());
+    w2.close();
+
+    Directory dir3 = newDirectory();
+    IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField("soft_deletes_1");
+    IndexWriter w3 = new IndexWriter(dir3, config);
+    w3.addIndexes(dir1);
+    for (SegmentCommitInfo si : w3.segmentInfos) {
+      FieldInfo softDeleteField = IndexWriter.readFieldInfos(si).fieldInfo("soft_deletes_1");
+      assertTrue(softDeleteField.isSoftDeletesField());
+    }
+    w3.close();
+    IOUtils.close(dir1, dir2, dir3);
+  }
+
+  public void testNotAllowUsingExistingFieldAsSoftDeletes() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
+    for (int i = 0; i < 2; i++) {
+      Document d = new Document();
+      d.add(new StringField("id", "1", Field.Store.YES));
+      if (random().nextBoolean()) {
+        d.add(new NumericDocValuesField("dv_field", 1));
+        w.updateDocument(new Term("id", "1"), d);
+      } else {
+        w.softUpdateDocument(new Term("id", "1"), d, new NumericDocValuesField("dv_field", 1));
+      }
+    }
+    w.commit();
+    w.close();
+    String softDeletesField = random().nextBoolean() ? "id" : "dv_field";
+    IllegalArgumentException error = expectThrows(IllegalArgumentException.class, () -> {
+      IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(softDeletesField);
+      new IndexWriter(dir, config);
+    });
+    assertEquals("cannot configure [" + softDeletesField + "] as soft-deletes;" +
+        " this index uses [" + softDeletesField + "] as non-soft-deletes already", error.getMessage());
+
+    IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField("non-existing-field");
+    w = new IndexWriter(dir, config);
+    w.close();
+    dir.close();
+  }
 }


@@ -120,7 +120,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
    deletes.onNewReader(segmentReader, commitInfo);
    reader.close();
    writer.close();
-    FieldInfo fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, 0, Collections.emptyMap(), 0, 0);
+    FieldInfo fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, 0, Collections.emptyMap(), 0, 0, true);
    List<Integer> docsDeleted = Arrays.asList(1, 3, 7, 8, DocIdSetIterator.NO_MORE_DOCS);
    List<DocValuesFieldUpdates> updates = Arrays.asList(singleUpdate(docsDeleted, 10, true));
    for (DocValuesFieldUpdates update : updates) {
@@ -140,7 +140,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
    docsDeleted = Arrays.asList(1, 2, DocIdSetIterator.NO_MORE_DOCS);
    updates = Arrays.asList(singleUpdate(docsDeleted, 10, true));
-    fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, 1, Collections.emptyMap(), 0, 0);
+    fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, 1, Collections.emptyMap(), 0, 0, true);
    for (DocValuesFieldUpdates update : updates) {
      deletes.onDocValuesUpdate(fieldInfo, update.iterator());
    }
@@ -182,7 +182,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
    SegmentCommitInfo segmentInfo = segmentReader.getSegmentInfo();
    PendingDeletes deletes = newPendingDeletes(segmentInfo);
    deletes.onNewReader(segmentReader, segmentInfo);
-    FieldInfo fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, segmentInfo.getNextDocValuesGen(), Collections.emptyMap(), 0, 0);
+    FieldInfo fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, segmentInfo.getNextDocValuesGen(), Collections.emptyMap(), 0, 0, true);
    List<Integer> docsDeleted = Arrays.asList(1, DocIdSetIterator.NO_MORE_DOCS);
    List<DocValuesFieldUpdates> updates = Arrays.asList(singleUpdate(docsDeleted, 3, true));
    for (DocValuesFieldUpdates update : updates) {
@@ -228,7 +228,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
    SegmentCommitInfo segmentInfo = segmentReader.getSegmentInfo();
    PendingDeletes deletes = newPendingDeletes(segmentInfo);
    deletes.onNewReader(segmentReader, segmentInfo);
-    FieldInfo fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, segmentInfo.getNextDocValuesGen(), Collections.emptyMap(), 0, 0);
+    FieldInfo fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, segmentInfo.getNextDocValuesGen(), Collections.emptyMap(), 0, 0, true);
    List<DocValuesFieldUpdates> updates = Arrays.asList(singleUpdate(Arrays.asList(0, 1, DocIdSetIterator.NO_MORE_DOCS), 3, false));
    for (DocValuesFieldUpdates update : updates) {
      deletes.onDocValuesUpdate(fieldInfo, update.iterator());
@@ -247,7 +247,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
    assertEquals(0, deletes.numPendingDeletes());
    segmentInfo.advanceDocValuesGen();
-    fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, segmentInfo.getNextDocValuesGen(), Collections.emptyMap(), 0, 0);
+    fieldInfo = new FieldInfo("_soft_deletes", 1, false, false, false, IndexOptions.NONE, DocValuesType.NUMERIC, segmentInfo.getNextDocValuesGen(), Collections.emptyMap(), 0, 0, true);
    updates = Arrays.asList(singleUpdate(Arrays.asList(1, DocIdSetIterator.NO_MORE_DOCS), 3, true));
    for (DocValuesFieldUpdates update : updates) {
      deletes.onDocValuesUpdate(fieldInfo, update.iterator());


@@ -88,7 +88,7 @@ public class TestSegmentMerger extends LuceneTestCase {
    SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(reader1, reader2),
                                             si, InfoStream.getDefault(), mergedDir,
-                                            new FieldInfos.FieldNumbers(),
+                                            new FieldInfos.FieldNumbers(null),
                                             newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1))));
    MergeState mergeState = merger.merge();
    int docsMerged = mergeState.segmentInfo.maxDoc();


@@ -81,7 +81,7 @@ public class TermVectorLeafReader extends LeafReader {
    }
    FieldInfo fieldInfo = new FieldInfo(field, 0,
                                        true, true, terms.hasPayloads(),
-                                        indexOptions, DocValuesType.NONE, -1, Collections.emptyMap(), 0, 0);
+                                        indexOptions, DocValuesType.NONE, -1, Collections.emptyMap(), 0, 0, false);
    fieldInfos = new FieldInfos(new FieldInfo[]{fieldInfo});
  }


@@ -501,7 +501,7 @@ public class MemoryIndex {
    IndexOptions indexOptions = storeOffsets ? IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
    return new FieldInfo(fieldName, ord, fieldType.storeTermVectors(), fieldType.omitNorms(), storePayloads,
                         indexOptions, fieldType.docValuesType(), -1, Collections.emptyMap(),
-                         fieldType.pointDimensionCount(), fieldType.pointNumBytes());
+                         fieldType.pointDimensionCount(), fieldType.pointNumBytes(), false);
  }
 
  private void storePointValues(Info info, BytesRef pointValue) {
@@ -520,7 +520,7 @@ public class MemoryIndex {
      info.fieldInfo = new FieldInfo(
          info.fieldInfo.name, info.fieldInfo.number, info.fieldInfo.hasVectors(), info.fieldInfo.hasPayloads(),
          info.fieldInfo.hasPayloads(), info.fieldInfo.getIndexOptions(), docValuesType, -1, info.fieldInfo.attributes(),
-          info.fieldInfo.getPointDimensionCount(), info.fieldInfo.getPointNumBytes()
+          info.fieldInfo.getPointDimensionCount(), info.fieldInfo.getPointNumBytes(), info.fieldInfo.isSoftDeletesField()
      );
    } else if (existingDocValuesType != docValuesType) {
      throw new IllegalArgumentException("Can't add [" + docValuesType + "] doc values field [" + fieldName + "], because [" + existingDocValuesType + "] doc values field already exists");


@@ -323,7 +323,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
    FieldInfo proto = oneDocReader.getFieldInfos().fieldInfo("field");
    FieldInfo field = new FieldInfo(proto.name, proto.number, proto.hasVectors(), proto.omitsNorms(), proto.hasPayloads(),
                                    proto.getIndexOptions(), proto.getDocValuesType(), proto.getDocValuesGen(), new HashMap<>(),
-                                    proto.getPointDimensionCount(), proto.getPointNumBytes());
+                                    proto.getPointDimensionCount(), proto.getPointNumBytes(), proto.isSoftDeletesField());
    FieldInfos fieldInfos = new FieldInfos(new FieldInfo[] { field } );


@@ -77,7 +77,8 @@ public class MismatchedLeafReader extends FilterLeafReader {
                                      oldInfo.getDocValuesGen(),          // dvGen
                                      oldInfo.attributes(),               // attributes
                                      oldInfo.getPointDimensionCount(),   // dimension count
-                                      oldInfo.getPointNumBytes());        // dimension numBytes
+                                      oldInfo.getPointNumBytes(),         // dimension numBytes
+                                      oldInfo.isSoftDeletesField());      // used as soft-deletes field
      shuffled.set(i, newInfo);
    }


@@ -130,7 +130,7 @@ public class RandomPostingsTester {
      fieldInfoArray[fieldUpto] = new FieldInfo(field, fieldUpto, false, false, true,
                                                IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS,
                                                DocValuesType.NONE, -1, new HashMap<>(),
-                                                0, 0);
+                                                0, 0, false);
      fieldUpto++;
 
      SortedMap<BytesRef,SeedAndOrd> postings = new TreeMap<>();
@@ -651,7 +651,7 @@ public class RandomPostingsTester {
                                             DocValuesType.NONE,
                                             -1,
                                             new HashMap<>(),
-                                             0, 0);
+                                             0, 0, false);
    }
 
    FieldInfos newFieldInfos = new FieldInfos(newFieldInfoArray);


@@ -797,7 +797,8 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
                                    fieldInfo.getDocValuesGen(),
                                    fieldInfo.attributes(),
                                    fieldInfo.getPointDimensionCount(),
-                                    fieldInfo.getPointNumBytes());
+                                    fieldInfo.getPointNumBytes(),
+                                    fieldInfo.isSoftDeletesField());
        newInfos.add(f);
      } else {


@@ -425,7 +425,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                    DocValuesType.NONE,
                                    fieldInfo.getDocValuesGen(),
                                    fieldInfo.attributes(),
-                                    0, 0);
+                                    0, 0, fieldInfo.isSoftDeletesField());
        newInfos.add(f);
      } else {


@@ -66,7 +66,7 @@ public class Insanity {
      if (fi.name.equals(insaneField)) {
        filteredInfos.add(new FieldInfo(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(),
                                        fi.hasPayloads(), fi.getIndexOptions(), DocValuesType.NONE, -1, Collections.emptyMap(),
-                                        fi.getPointDimensionCount(), fi.getPointNumBytes()));
+                                        fi.getPointDimensionCount(), fi.getPointNumBytes(), fi.isSoftDeletesField()));
      } else {
        filteredInfos.add(fi);
      }


@@ -282,7 +282,7 @@ public class UninvertingReader extends FilterLeafReader {
      }
      filteredInfos.add(new FieldInfo(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(),
                                      fi.hasPayloads(), fi.getIndexOptions(), type, fi.getDocValuesGen(), fi.attributes(),
-                                      fi.getPointDimensionCount(), fi.getPointNumBytes()));
+                                      fi.getPointDimensionCount(), fi.getPointNumBytes(), fi.isSoftDeletesField()));
    }
    fieldInfos = new FieldInfos(filteredInfos.toArray(new FieldInfo[filteredInfos.size()]));
  }