mirror of https://github.com/apache/lucene.git

LUCENE-3246: invert getDelDocs to getLiveDocs as pre-cursor for LUCENE-1536

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1143415 13f79535-47bb-0310-9956-ffa450edef68

commit b55eeb510d (parent e7722eebe5)
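The diff below is mechanical but easy to misread, because the polarity of the Bits argument flips at every call site: the old skipDocs/delDocs bitset marked deleted documents, while the new liveDocs bitset marks surviving documents. A minimal sketch of the acceptance test on both sides of the rename (the helper class and method names here are hypothetical, for illustration only; they are not part of the commit):

import org.apache.lucene.util.Bits;

// Hypothetical illustration of the polarity flip; not part of this commit.
final class LiveDocsPolarity {

  // Old convention: the Bits marked DELETED docs, so a doc was
  // accepted when its bit was clear (or when no bitset was given).
  static boolean acceptedOld(Bits skipDocs, int docID) {
    return skipDocs == null || !skipDocs.get(docID);
  }

  // New convention: the Bits marks LIVE docs, so a doc is
  // accepted when its bit is set (or when no bitset is given).
  static boolean acceptedNew(Bits liveDocs, int docID) {
    return liveDocs == null || liveDocs.get(docID);
  }
}

Note that null means "no deletions" under both conventions, which is why every call site keeps its == null short-circuit while negating the get() test.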
@@ -23,13 +23,13 @@ import org.apache.lucene.util.BytesRef;
 public class InstantiatedDocsAndPositionsEnum extends DocsAndPositionsEnum {
   private int upto;
   private int posUpto;
-  private Bits skipDocs;
+  private Bits liveDocs;
   private InstantiatedTerm term;
   protected InstantiatedTermDocumentInformation currentDoc;
   private final BytesRef payload = new BytesRef();

-  public InstantiatedDocsAndPositionsEnum reset(Bits skipDocs, InstantiatedTerm term) {
-    this.skipDocs = skipDocs;
+  public InstantiatedDocsAndPositionsEnum reset(Bits liveDocs, InstantiatedTerm term) {
+    this.liveDocs = liveDocs;
     this.term = term;
     upto = -1;
     return this;

@@ -47,7 +47,7 @@ public class InstantiatedDocsAndPositionsEnum extends DocsAndPositionsEnum {
       return NO_MORE_DOCS;
     } else {
       currentDoc = term.getAssociatedDocuments()[upto];
-      if (skipDocs == null || !skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
+      if (liveDocs == null || liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
         posUpto = -1;
         return docID();
       } else {

@@ -69,7 +69,7 @@ public class InstantiatedDocsAndPositionsEnum extends DocsAndPositionsEnum {
     }
     currentDoc = term.getAssociatedDocuments()[upto];

-    if (skipDocs != null && skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
+    if (liveDocs != null && !liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
       return nextDoc();
     } else {
       posUpto = -1;
@@ -21,12 +21,12 @@ import org.apache.lucene.util.Bits;

 public class InstantiatedDocsEnum extends DocsEnum {
   private int upto;
-  private Bits skipDocs;
+  private Bits liveDocs;
   private InstantiatedTerm term;
   protected InstantiatedTermDocumentInformation currentDoc;

-  public InstantiatedDocsEnum reset(Bits skipDocs, InstantiatedTerm term) {
-    this.skipDocs = skipDocs;
+  public InstantiatedDocsEnum reset(Bits liveDocs, InstantiatedTerm term) {
+    this.liveDocs = liveDocs;
     this.term = term;
     upto = -1;
     return this;

@@ -44,7 +44,7 @@ public class InstantiatedDocsEnum extends DocsEnum {
       return NO_MORE_DOCS;
     } else {
       currentDoc = term.getAssociatedDocuments()[upto];
-      if (skipDocs == null || !skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
+      if (liveDocs == null || liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
         return docID();
       } else {
         return nextDoc();

@@ -65,7 +65,7 @@ public class InstantiatedDocsEnum extends DocsEnum {
     }
     currentDoc = term.getAssociatedDocuments()[upto];

-    if (skipDocs != null && skipDocs.get(currentDoc.getDocument().getDocumentNumber())) {
+    if (liveDocs != null && !liveDocs.get(currentDoc.getDocument().getDocumentNumber())) {
       return nextDoc();
     } else {
       return docID();
@@ -182,9 +182,9 @@ public class InstantiatedIndex
     }

     // create documents
-    final Bits delDocs = MultiFields.getDeletedDocs(sourceIndexReader);
+    final Bits liveDocs = MultiFields.getLiveDocs(sourceIndexReader);
     for (int i = 0; i < sourceIndexReader.maxDoc(); i++) {
-      if (delDocs != null && delDocs.get(i)) {
+      if (liveDocs != null && !liveDocs.get(i)) {
         deletedDocuments.set(i);
       } else {
         InstantiatedDocument document = new InstantiatedDocument();

@@ -254,7 +254,7 @@ public class InstantiatedIndex
     // create term-document informations
     for (InstantiatedTerm term : orderedTerms) {
       DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(sourceIndexReader,
-                                                                            MultiFields.getDeletedDocs(sourceIndexReader),
+                                                                            MultiFields.getLiveDocs(sourceIndexReader),
                                                                             term.getTerm().field(),
                                                                             new BytesRef(term.getTerm().text()));
       int position = 0;
@@ -107,11 +107,11 @@ public class InstantiatedIndexReader extends IndexReader {
   }

   @Override
-  public Bits getDeletedDocs() {
+  public Bits getLiveDocs() {
     return new Bits() {
       public boolean get(int n) {
-        return (index.getDeletedDocuments() != null && index.getDeletedDocuments().get(n))
-          || (uncommittedDeletedDocuments != null && uncommittedDeletedDocuments.get(n));
+        return !(index.getDeletedDocuments() != null && index.getDeletedDocuments().get(n))
+          && !(uncommittedDeletedDocuments != null && uncommittedDeletedDocuments.get(n));
       }

       public int length() {
@@ -118,19 +118,19 @@ public class InstantiatedTermsEnum extends TermsEnum {
   }

   @Override
-  public DocsEnum docs(Bits skipDocs, DocsEnum reuse) {
+  public DocsEnum docs(Bits liveDocs, DocsEnum reuse) {
     if (reuse == null || !(reuse instanceof InstantiatedDocsEnum)) {
       reuse = new InstantiatedDocsEnum();
     }
-    return ((InstantiatedDocsEnum) reuse).reset(skipDocs, terms[upto]);
+    return ((InstantiatedDocsEnum) reuse).reset(liveDocs, terms[upto]);
   }

   @Override
-  public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) {
+  public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) {
     if (reuse == null || !(reuse instanceof InstantiatedDocsAndPositionsEnum)) {
       reuse = new InstantiatedDocsAndPositionsEnum();
     }
-    return ((InstantiatedDocsAndPositionsEnum) reuse).reset(skipDocs, terms[upto]);
+    return ((InstantiatedDocsAndPositionsEnum) reuse).reset(liveDocs, terms[upto]);
   }

   @Override
@@ -138,8 +138,8 @@ public class TestIndicesEquals extends LuceneTestCase {
       testTermEnum.seekCeil(new BytesRef(t.text()));
       assertEquals(aprioriTermEnum.term(), testTermEnum.term());

-      DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), null);
-      DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), null);
+      DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), null);
+      DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), null);

       assertEquals(aprioriTermDocs.nextDoc(), testTermDocs.nextDoc());
       assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());

@@ -186,8 +186,8 @@ public class TestIndicesEquals extends LuceneTestCase {

       assertEquals(aprioriTermEnum.next(), testTermEnum.next());

-      aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), aprioriTermDocs);
-      testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), testTermDocs);
+      aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), aprioriTermDocs);
+      testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), testTermDocs);

       while (aprioriTermDocs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
         assertTrue(testTermDocs.nextDoc() != DocsEnum.NO_MORE_DOCS);

@@ -309,13 +309,13 @@ public class TestIndicesEquals extends LuceneTestCase {
     assertEquals(air.numDocs(), tir.numDocs());
     assertEquals(air.numDeletedDocs(), tir.numDeletedDocs());

-    final Bits aDelDocs = MultiFields.getDeletedDocs(air);
-    final Bits tDelDocs = MultiFields.getDeletedDocs(tir);
-    assertTrue((aDelDocs != null && tDelDocs != null) ||
-               (aDelDocs == null && tDelDocs == null));
-    if (aDelDocs != null) {
+    final Bits aLiveDocs = MultiFields.getLiveDocs(air);
+    final Bits tLiveDocs = MultiFields.getLiveDocs(tir);
+    assertTrue((aLiveDocs != null && tLiveDocs != null) ||
+               (aLiveDocs == null && tLiveDocs == null));
+    if (aLiveDocs != null) {
       for (int d =0; d<air.maxDoc(); d++) {
-        assertEquals(aDelDocs.get(d), tDelDocs.get(d));
+        assertEquals(aLiveDocs.get(d), tLiveDocs.get(d));
       }
     }

@@ -366,13 +366,13 @@ public class TestIndicesEquals extends LuceneTestCase {
       }
     }

-    final Bits apDelDocs = MultiFields.getDeletedDocs(aprioriReader);
-    final Bits testDelDocs = MultiFields.getDeletedDocs(testReader);
-    assertTrue((apDelDocs != null && testDelDocs != null) ||
-               (apDelDocs == null && testDelDocs == null));
-    if (apDelDocs != null) {
+    final Bits apLiveDocs = MultiFields.getLiveDocs(aprioriReader);
+    final Bits testLiveDocs = MultiFields.getLiveDocs(testReader);
+    assertTrue((apLiveDocs != null && testLiveDocs != null) ||
+               (apLiveDocs == null && testLiveDocs == null));
+    if (apLiveDocs != null) {
       for (int docIndex = 0; docIndex < aprioriReader.numDocs(); docIndex++) {
-        assertEquals(apDelDocs.get(docIndex), testDelDocs.get(docIndex));
+        assertEquals(apLiveDocs.get(docIndex), testLiveDocs.get(docIndex));
       }
     }

@@ -407,8 +407,8 @@ public class TestIndicesEquals extends LuceneTestCase {

       // compare termDocs seeking

-      DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), null);
-      DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), null);
+      DocsEnum aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), null);
+      DocsEnum testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), null);

       while (aprioriTermDocs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
         assertTrue(testTermDocs.advance(aprioriTermDocs.docID()) != DocsEnum.NO_MORE_DOCS);

@@ -419,8 +419,8 @@ public class TestIndicesEquals extends LuceneTestCase {

       assertEquals(aprioriReader.docFreq(aprioriField, aprioriTermEnum.term()), testReader.docFreq(aprioriField, testTermEnum.term()));

-      aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getDeletedDocs(aprioriReader), aprioriTermDocs);
-      testTermDocs = testTermEnum.docs(MultiFields.getDeletedDocs(testReader), testTermDocs);
+      aprioriTermDocs = aprioriTermEnum.docs(MultiFields.getLiveDocs(aprioriReader), aprioriTermDocs);
+      testTermDocs = testTermEnum.docs(MultiFields.getLiveDocs(testReader), testTermDocs);

       while (true) {
         if (aprioriTermDocs.nextDoc() == DocsEnum.NO_MORE_DOCS) {

@@ -439,8 +439,8 @@ public class TestIndicesEquals extends LuceneTestCase {

       // compare term positions

-      DocsAndPositionsEnum aprioriTermPositions = aprioriTermEnum.docsAndPositions(MultiFields.getDeletedDocs(aprioriReader), null);
-      DocsAndPositionsEnum testTermPositions = testTermEnum.docsAndPositions(MultiFields.getDeletedDocs(testReader), null);
+      DocsAndPositionsEnum aprioriTermPositions = aprioriTermEnum.docsAndPositions(MultiFields.getLiveDocs(aprioriReader), null);
+      DocsAndPositionsEnum testTermPositions = testTermEnum.docsAndPositions(MultiFields.getLiveDocs(testReader), null);

       if (VERBOSE) {
         System.out.println("TEST: enum1=" + aprioriTermPositions + " enum2=" + testTermPositions);
@@ -53,8 +53,9 @@ public class TestUnoptimizedReaderOnConstructor extends LuceneTestCase {
     unoptimizedReader.deleteDocument(2);

     try {
       new InstantiatedIndex(unoptimizedReader);
     } catch (Exception e) {
+      e.printStackTrace(System.out);
       fail("No exceptions when loading an unoptimized reader!");
     }

@@ -769,7 +769,7 @@ public class MemoryIndex {
     }

     @Override
-    public Bits getDeletedDocs() {
+    public Bits getLiveDocs() {
       return null;
     }

@@ -925,19 +925,19 @@ public class MemoryIndex {
     }

     @Override
-    public DocsEnum docs(Bits skipDocs, DocsEnum reuse) {
+    public DocsEnum docs(Bits liveDocs, DocsEnum reuse) {
       if (reuse == null || !(reuse instanceof MemoryDocsEnum)) {
         reuse = new MemoryDocsEnum();
       }
-      return ((MemoryDocsEnum) reuse).reset(skipDocs, info.sortedTerms[termUpto].getValue());
+      return ((MemoryDocsEnum) reuse).reset(liveDocs, info.sortedTerms[termUpto].getValue());
     }

     @Override
-    public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) {
+    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) {
       if (reuse == null || !(reuse instanceof MemoryDocsAndPositionsEnum)) {
         reuse = new MemoryDocsAndPositionsEnum();
       }
-      return ((MemoryDocsAndPositionsEnum) reuse).reset(skipDocs, info.sortedTerms[termUpto].getValue());
+      return ((MemoryDocsAndPositionsEnum) reuse).reset(liveDocs, info.sortedTerms[termUpto].getValue());
     }

     @Override

@@ -962,10 +962,10 @@ public class MemoryIndex {
   private class MemoryDocsEnum extends DocsEnum {
     private ArrayIntList positions;
     private boolean hasNext;
-    private Bits skipDocs;
+    private Bits liveDocs;

-    public DocsEnum reset(Bits skipDocs, ArrayIntList positions) {
-      this.skipDocs = skipDocs;
+    public DocsEnum reset(Bits liveDocs, ArrayIntList positions) {
+      this.liveDocs = liveDocs;
       this.positions = positions;
       hasNext = true;
       return this;

@@ -978,7 +978,7 @@ public class MemoryIndex {

     @Override
     public int nextDoc() {
-      if (hasNext && (skipDocs == null || !skipDocs.get(0))) {
+      if (hasNext && (liveDocs == null || liveDocs.get(0))) {
        hasNext = false;
        return 0;
      } else {

@@ -1001,10 +1001,10 @@ public class MemoryIndex {
     private ArrayIntList positions;
     private int posUpto;
     private boolean hasNext;
-    private Bits skipDocs;
+    private Bits liveDocs;

-    public DocsAndPositionsEnum reset(Bits skipDocs, ArrayIntList positions) {
-      this.skipDocs = skipDocs;
+    public DocsAndPositionsEnum reset(Bits liveDocs, ArrayIntList positions) {
+      this.liveDocs = liveDocs;
       this.positions = positions;
       posUpto = 0;
       hasNext = true;

@@ -1018,7 +1018,7 @@ public class MemoryIndex {

     @Override
     public int nextDoc() {
-      if (hasNext && (skipDocs == null || !skipDocs.get(0))) {
+      if (hasNext && (liveDocs == null || liveDocs.get(0))) {
        hasNext = false;
        return 0;
      } else {
@@ -120,7 +120,7 @@ public class FieldNormModifier {

     final FieldInvertState invertState = new FieldInvertState();
     for(IndexReader subReader : subReaders) {
-      final Bits delDocs = subReader.getDeletedDocs();
+      final Bits liveDocs = subReader.getLiveDocs();

       int[] termCounts = new int[subReader.maxDoc()];
       Fields fields = subReader.fields();

@@ -130,7 +130,7 @@ public class FieldNormModifier {
         TermsEnum termsEnum = terms.iterator();
         DocsEnum docs = null;
         while(termsEnum.next() != null) {
-          docs = termsEnum.docs(delDocs, docs);
+          docs = termsEnum.docs(liveDocs, docs);
           while(true) {
             int docID = docs.nextDoc();
             if (docID != docs.NO_MORE_DOCS) {

@@ -145,7 +145,7 @@ public class FieldNormModifier {

       invertState.setBoost(1.0f);
       for (int d = 0; d < termCounts.length; d++) {
-        if (delDocs == null || !delDocs.get(d)) {
+        if (liveDocs == null || liveDocs.get(d)) {
           invertState.setLength(termCounts[d]);
           subReader.setNorm(d, field, fieldSim.encodeNormValue(fieldSim.computeNorm(invertState)));
         }
@@ -25,8 +25,8 @@ import org.apache.lucene.index.IndexWriter; // javadoc
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.util.OpenBitSet;
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.OpenBitSet;
 import org.apache.lucene.util.Version;

 /**

@@ -177,27 +177,17 @@ public class MultiPassIndexSplitter {
   * Instead, deletions are buffered in a bitset and overlaid with the original
   * list of deletions.
   */
-  public static class FakeDeleteIndexReader extends FilterIndexReader {
-    OpenBitSet dels;
-    OpenBitSet oldDels = null;
+  public static final class FakeDeleteIndexReader extends FilterIndexReader {
+    OpenBitSet liveDocs;

     public FakeDeleteIndexReader(IndexReader in) {
       super(new SlowMultiReaderWrapper(in));
-      dels = new OpenBitSet(in.maxDoc());
-      if (in.hasDeletions()) {
-        oldDels = new OpenBitSet(in.maxDoc());
-        final Bits oldDelBits = MultiFields.getDeletedDocs(in);
-        assert oldDelBits != null;
-        for (int i = 0; i < in.maxDoc(); i++) {
-          if (oldDelBits.get(i)) oldDels.set(i);
-        }
-        dels.or(oldDels);
-      }
+      doUndeleteAll(); // initialize main bitset
     }

     @Override
     public int numDocs() {
-      return in.maxDoc() - (int)dels.cardinality();
+      return (int) liveDocs.cardinality();
     }

     /**

@@ -205,26 +195,35 @@ public class MultiPassIndexSplitter {
     * deletions.
     */
     @Override
-    protected void doUndeleteAll() throws CorruptIndexException, IOException {
-      dels = new OpenBitSet(in.maxDoc());
-      if (oldDels != null) {
-        dels.or(oldDels);
+    protected void doUndeleteAll() {
+      final int maxDoc = in.maxDoc();
+      liveDocs = new OpenBitSet(maxDoc);
+      if (in.hasDeletions()) {
+        final Bits oldLiveDocs = in.getLiveDocs();
+        assert oldLiveDocs != null;
+        // this loop is a little bit ineffective, as Bits has no nextSetBit():
+        for (int i = 0; i < maxDoc; i++) {
+          if (oldLiveDocs.get(i)) liveDocs.fastSet(i);
+        }
+      } else {
+        // mark all docs as valid
+        liveDocs.set(0, maxDoc);
       }
     }

     @Override
-    protected void doDelete(int n) throws CorruptIndexException, IOException {
-      dels.set(n);
+    protected void doDelete(int n) {
+      liveDocs.clear(n);
     }

     @Override
     public boolean hasDeletions() {
-      return !dels.isEmpty();
+      return (in.maxDoc() != this.numDocs());
     }

     @Override
-    public Bits getDeletedDocs() {
-      return dels;
+    public Bits getLiveDocs() {
+      return liveDocs;
     }
   }
 }
@@ -19,16 +19,16 @@ package org.apache.lucene.index;

 import java.io.IOException;

-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
-import org.apache.lucene.store.Directory;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.TermRangeFilter;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.OpenBitSetDISI;
 import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.OpenBitSetDISI;
 import org.apache.lucene.util.Version;

 /**

@@ -87,13 +87,14 @@ public class PKIndexSplitter {
   }

   public static class DocumentFilteredIndexReader extends FilterIndexReader {
-    final Bits readerDels;
+    final Bits liveDocs;
     final int numDocs;

     public DocumentFilteredIndexReader(IndexReader reader, Filter preserveFilter, boolean negateFilter) throws IOException {
       super(new SlowMultiReaderWrapper(reader));

-      final OpenBitSetDISI bits = new OpenBitSetDISI(in.maxDoc());
+      final int maxDoc = in.maxDoc();
+      final OpenBitSetDISI bits = new OpenBitSetDISI(maxDoc);
       final DocIdSet docs = preserveFilter.getDocIdSet((AtomicReaderContext) in.getTopReaderContext());
       if (docs != null) {
         final DocIdSetIterator it = docs.iterator();

@@ -101,23 +102,24 @@ public class PKIndexSplitter {
           bits.inPlaceOr(it);
         }
       }
-      // this is somehow inverse, if we negate the filter, we delete all documents it matches!
-      if (!negateFilter) {
-        bits.flip(0, in.maxDoc());
+      if (negateFilter) {
+        bits.flip(0, maxDoc);
       }

       if (in.hasDeletions()) {
-        final Bits oldDelBits = in.getDeletedDocs();
-        assert oldDelBits != null;
-        for (int i = 0; i < in.maxDoc(); i++) {
-          if (oldDelBits.get(i)) {
-            bits.set(i);
+        final Bits oldLiveDocs = in.getLiveDocs();
+        assert oldLiveDocs != null;
+        final DocIdSetIterator it = bits.iterator();
+        for (int i = it.nextDoc(); i < maxDoc; i = it.nextDoc()) {
+          if (!oldLiveDocs.get(i)) {
+            // we can safely modify the current bit, as the iterator already stepped over it:
+            bits.fastClear(i);
           }
         }
       }

-      this.readerDels = bits;
-      this.numDocs = in.maxDoc() - (int) bits.cardinality();
+      this.liveDocs = bits;
+      this.numDocs = (int) bits.cardinality();
     }

     @Override

@@ -131,8 +133,8 @@ public class PKIndexSplitter {
     }

     @Override
-    public Bits getDeletedDocs() {
-      return readerDels;
+    public Bits getLiveDocs() {
+      return liveDocs;
     }
   }
 }
@@ -96,7 +96,7 @@ public class TermVectorAccessor {
       positions.clear();
     }

-    final Bits delDocs = MultiFields.getDeletedDocs(indexReader);
+    final Bits liveDocs = MultiFields.getLiveDocs(indexReader);

     Terms terms = MultiFields.getTerms(indexReader, field);
     boolean anyTerms = false;

@@ -109,9 +109,9 @@ public class TermVectorAccessor {
         if (text != null) {
           anyTerms = true;
           if (!mapper.isIgnoringPositions()) {
-            docs = postings = termsEnum.docsAndPositions(delDocs, postings);
+            docs = postings = termsEnum.docsAndPositions(liveDocs, postings);
           } else {
-            docs = termsEnum.docs(delDocs, docs);
+            docs = termsEnum.docs(liveDocs, docs);
           }

           int docID = docs.advance(documentNumber);
@@ -190,8 +190,8 @@ public class HighFreqTerms {
       return 0;
     }

-    Bits skipDocs = MultiFields.getDeletedDocs(reader);
-    if (skipDocs == null) {
+    Bits liveDocs = MultiFields.getLiveDocs(reader);
+    if (liveDocs == null) {
       // TODO: we could do this up front, during the scan
       // (next()), instead of after-the-fact here w/ seek,
       // if the codec supports it and there are no del

@@ -202,7 +202,7 @@ public class HighFreqTerms {
       }
     }

-    DocsEnum de = termsEnum.docs(skipDocs, null);
+    DocsEnum de = termsEnum.docs(liveDocs, null);

     // use DocsEnum.read() and BulkResult api
     final DocsEnum.BulkReadResult bulkresult = de.getBulkResult();
@@ -134,11 +134,11 @@ public class TestNRTManager extends LuceneTestCase {
         System.out.println("TEST: now warm merged reader=" + reader);
       }
       final int maxDoc = reader.maxDoc();
-      final Bits delDocs = reader.getDeletedDocs();
+      final Bits liveDocs = reader.getLiveDocs();
       int sum = 0;
       final int inc = Math.max(1, maxDoc/50);
       for(int docID=0;docID<maxDoc;docID += inc) {
-        if (delDocs == null || !delDocs.get(docID)) {
+        if (liveDocs == null || liveDocs.get(docID)) {
           final Document doc = reader.document(docID);
           sum += doc.getFields().size();
         }
@@ -87,9 +87,9 @@ public class TestPKIndexSplitter extends LuceneTestCase {
   }

   private void checkContents(IndexReader ir, String indexname) throws Exception {
-    final Bits delDocs = MultiFields.getDeletedDocs(ir);
+    final Bits liveDocs = MultiFields.getLiveDocs(ir);
     for (int i = 0; i < ir.maxDoc(); i++) {
-      if (delDocs == null || !delDocs.get(i)) {
+      if (liveDocs == null || liveDocs.get(i)) {
         assertEquals(indexname, ir.document(i).get("indexname"));
       }
     }
@@ -86,7 +86,7 @@ public class DuplicateFilter extends Filter

   private OpenBitSet correctBits(IndexReader reader) throws IOException {
     OpenBitSet bits = new OpenBitSet(reader.maxDoc()); //assume all are INvalid
-    final Bits delDocs = MultiFields.getDeletedDocs(reader);
+    final Bits liveDocs = MultiFields.getLiveDocs(reader);
     Terms terms = reader.fields().terms(fieldName);
     if (terms != null) {
       TermsEnum termsEnum = terms.iterator();

@@ -96,7 +96,7 @@ public class DuplicateFilter extends Filter
         if (currTerm == null) {
           break;
         } else {
-          docs = termsEnum.docs(delDocs, docs);
+          docs = termsEnum.docs(liveDocs, docs);
           int doc = docs.nextDoc();
           if (doc != DocsEnum.NO_MORE_DOCS) {
             if (keepMode == KM_USE_FIRST_OCCURRENCE) {

@@ -124,7 +124,7 @@ public class DuplicateFilter extends Filter

     OpenBitSet bits=new OpenBitSet(reader.maxDoc());
     bits.set(0,reader.maxDoc()); //assume all are valid
-    final Bits delDocs = MultiFields.getDeletedDocs(reader);
+    final Bits liveDocs = MultiFields.getLiveDocs(reader);
     Terms terms = reader.fields().terms(fieldName);
     if (terms != null) {
       TermsEnum termsEnum = terms.iterator();

@@ -136,7 +136,7 @@ public class DuplicateFilter extends Filter
         } else {
           if (termsEnum.docFreq() > 1) {
             // unset potential duplicates
-            docs = termsEnum.docs(delDocs, docs);
+            docs = termsEnum.docs(liveDocs, docs);
             int doc = docs.nextDoc();
             if (doc != DocsEnum.NO_MORE_DOCS) {
               if (keepMode == KM_USE_FIRST_OCCURRENCE) {
@@ -63,7 +63,7 @@ public class TermsFilter extends Filter
     OpenBitSet result=new OpenBitSet(reader.maxDoc());
     Fields fields = reader.fields();
     BytesRef br = new BytesRef();
-    Bits delDocs = reader.getDeletedDocs();
+    Bits liveDocs = reader.getLiveDocs();
     if (fields != null) {
       String lastField = null;
       Terms termsC = null;

@@ -80,7 +80,7 @@ public class TermsFilter extends Filter
         if (terms != null) {
           br.copy(term.bytes());
           if (termsEnum.seekCeil(br) == TermsEnum.SeekStatus.FOUND) {
-            docs = termsEnum.docs(delDocs, docs);
+            docs = termsEnum.docs(liveDocs, docs);
             while(docs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
               result.set(docs.docID());
             }
@@ -139,7 +139,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
       Document d=searcher.doc(hits[i].doc);
       String url=d.get(KEY_FIELD);
       DocsEnum td = MultiFields.getTermDocsEnum(reader,
-                                                MultiFields.getDeletedDocs(reader),
+                                                MultiFields.getLiveDocs(reader),
                                                 KEY_FIELD,
                                                 new BytesRef(url));
       int lastDoc=0;

@@ -163,7 +163,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
       Document d=searcher.doc(hits[i].doc);
       String url=d.get(KEY_FIELD);
       DocsEnum td = MultiFields.getTermDocsEnum(reader,
-                                                MultiFields.getDeletedDocs(reader),
+                                                MultiFields.getLiveDocs(reader),
                                                 KEY_FIELD,
                                                 new BytesRef(url));
       int lastDoc=0;
@@ -46,7 +46,7 @@ public class CartesianShapeFilter extends Filter {

   @Override
   public DocIdSet getDocIdSet(final AtomicReaderContext context) throws IOException {
-    final Bits delDocs = context.reader.getDeletedDocs();
+    final Bits liveDocs = context.reader.getLiveDocs();
     final List<Double> area = shape.getArea();
     final int sz = area.size();

@@ -58,7 +58,7 @@ public class CartesianShapeFilter extends Filter {
       return new DocIdSet() {
         @Override
         public DocIdSetIterator iterator() throws IOException {
-          return context.reader.termDocsEnum(delDocs, fieldName, bytesRef);
+          return context.reader.termDocsEnum(liveDocs, fieldName, bytesRef);
         }

         @Override

@@ -71,7 +71,7 @@ public class CartesianShapeFilter extends Filter {
       for (int i =0; i< sz; i++) {
         double boxId = area.get(i).doubleValue();
         NumericUtils.longToPrefixCoded(NumericUtils.doubleToSortableLong(boxId), 0, bytesRef);
-        final DocsEnum docsEnum = context.reader.termDocsEnum(delDocs, fieldName, bytesRef);
+        final DocsEnum docsEnum = context.reader.termDocsEnum(liveDocs, fieldName, bytesRef);
         if (docsEnum == null) continue;
         // iterate through all documents
         // which have this boxId
@@ -162,7 +162,7 @@ class BufferedDeletesStream {
   };

   /** Resolves the buffered deleted Term/Query/docIDs, into
-   *  actual deleted docIDs in the deletedDocs BitVector for
+   *  actual deleted docIDs in the liveDocs BitVector for
    *  each SegmentReader. */
   public synchronized ApplyDeletesResult applyDeletes(IndexWriter.ReaderPool readerPool, List<SegmentInfo> infos) throws IOException {
     final long t0 = System.currentTimeMillis();

@@ -399,7 +399,7 @@ class BufferedDeletesStream {
           // System.out.println(" term=" + term);

           if (termsEnum.seekExact(term.bytes(), false)) {
-            DocsEnum docsEnum = termsEnum.docs(reader.getDeletedDocs(), docs);
+            DocsEnum docsEnum = termsEnum.docs(reader.getLiveDocs(), docs);

             if (docsEnum != null) {
               while (true) {
@@ -17,12 +17,16 @@ package org.apache.lucene.index;
  * limitations under the License.
  */

-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IndexInput;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.text.NumberFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;

 import org.apache.lucene.document.AbstractField; // for javadocs
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.codecs.CodecProvider;

@@ -30,20 +34,16 @@ import org.apache.lucene.index.codecs.DefaultSegmentInfosWriter;
 import org.apache.lucene.index.codecs.PerDocValues;
 import org.apache.lucene.index.values.IndexDocValues;
 import org.apache.lucene.index.values.ValuesEnum;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.StringHelper;

-import java.text.NumberFormat;
-import java.io.PrintStream;
-import java.io.IOException;
-import java.io.File;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;

 /**
  * Basic tool and API to check the health of an index and
  * write a new segments file that removes reference to

@@ -520,13 +520,13 @@ public class CheckIndex {
       final int numDocs = reader.numDocs();
       toLoseDocCount = numDocs;
       if (reader.hasDeletions()) {
-        if (reader.deletedDocs.count() != info.getDelCount()) {
-          throw new RuntimeException("delete count mismatch: info=" + info.getDelCount() + " vs deletedDocs.count()=" + reader.deletedDocs.count());
+        if (reader.liveDocs.count() != info.docCount - info.getDelCount()) {
+          throw new RuntimeException("delete count mismatch: info=" + (info.docCount - info.getDelCount()) + " vs reader=" + reader.liveDocs.count());
         }
-        if (reader.deletedDocs.count() > reader.maxDoc()) {
-          throw new RuntimeException("too many deleted docs: maxDoc()=" + reader.maxDoc() + " vs deletedDocs.count()=" + reader.deletedDocs.count());
+        if ((info.docCount-reader.liveDocs.count()) > reader.maxDoc()) {
+          throw new RuntimeException("too many deleted docs: maxDoc()=" + reader.maxDoc() + " vs del count=" + (info.docCount-reader.liveDocs.count()));
         }
-        if (info.docCount - numDocs != info.getDelCount()){
+        if (info.docCount - numDocs != info.getDelCount()) {
           throw new RuntimeException("delete count mismatch: info=" + info.getDelCount() + " vs reader=" + (info.docCount - numDocs));
         }
         segInfoStat.numDeleted = info.docCount - numDocs;

@@ -654,7 +654,7 @@ public class CheckIndex {
     final Status.TermIndexStatus status = new Status.TermIndexStatus();

     final int maxDoc = reader.maxDoc();
-    final Bits delDocs = reader.getDeletedDocs();
+    final Bits liveDocs = reader.getLiveDocs();

     final IndexSearcher is = new IndexSearcher(reader);

@@ -712,8 +712,8 @@ public class CheckIndex {
         final int docFreq = terms.docFreq();
         status.totFreq += docFreq;

-        docs = terms.docs(delDocs, docs);
-        postings = terms.docsAndPositions(delDocs, postings);
+        docs = terms.docs(liveDocs, docs);
+        postings = terms.docsAndPositions(liveDocs, postings);

         if (hasOrd) {
           long ord = -1;

@@ -815,7 +815,7 @@ public class CheckIndex {
         if (hasPositions) {
           for(int idx=0;idx<7;idx++) {
             final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
-            postings = terms.docsAndPositions(delDocs, postings);
+            postings = terms.docsAndPositions(liveDocs, postings);
             final int docID = postings.advance(skipDocID);
             if (docID == DocsEnum.NO_MORE_DOCS) {
               break;

@@ -851,7 +851,7 @@ public class CheckIndex {
         } else {
           for(int idx=0;idx<7;idx++) {
             final int skipDocID = (int) (((idx+1)*(long) maxDoc)/8);
-            docs = terms.docs(delDocs, docs);
+            docs = terms.docs(liveDocs, docs);
             final int docID = docs.advance(skipDocID);
             if (docID == DocsEnum.NO_MORE_DOCS) {
               break;

@@ -919,7 +919,7 @@ public class CheckIndex {
           throw new RuntimeException("seek to existing term " + seekTerms[i] + " failed");
         }

-        docs = terms.docs(delDocs, docs);
+        docs = terms.docs(liveDocs, docs);
         if (docs == null) {
           throw new RuntimeException("null DocsEnum from to existing term " + seekTerms[i]);
         }

@@ -967,9 +967,9 @@ public class CheckIndex {
       }

       // Scan stored fields for all documents
-      final Bits delDocs = reader.getDeletedDocs();
+      final Bits liveDocs = reader.getLiveDocs();
       for (int j = 0; j < info.docCount; ++j) {
-        if (delDocs == null || !delDocs.get(j)) {
+        if (liveDocs == null || liveDocs.get(j)) {
           status.docCount++;
           Document doc = reader.document(j);
           status.totFields += doc.getFields().size();

@@ -1063,9 +1063,9 @@ public class CheckIndex {
         infoStream.print("    test: term vectors........");
       }

      final Bits liveDocs = reader.getLiveDocs();
       for (int j = 0; j < info.docCount; ++j) {
-        if (delDocs == null || !delDocs.get(j)) {
+        if (liveDocs == null || liveDocs.get(j)) {
           status.docCount++;
           TermFreqVector[] tfv = reader.getTermFreqVectors(j);
           if (tfv != null) {
@@ -350,8 +350,8 @@ class DirectoryReader extends IndexReader implements Cloneable {
   }

   @Override
-  public Bits getDeletedDocs() {
-    throw new UnsupportedOperationException("please use MultiFields.getDeletedDocs, or wrap your IndexReader with SlowMultiReaderWrapper, if you really need a top level Bits deletedDocs");
+  public Bits getLiveDocs() {
+    throw new UnsupportedOperationException("please use MultiFields.getLiveDocs, or wrap your IndexReader with SlowMultiReaderWrapper, if you really need a top level Bits liveDocs");
   }

   @Override
@@ -249,7 +249,7 @@ public class DocTermOrds {

     boolean testedOrd = false;

-    final Bits delDocs = MultiFields.getDeletedDocs(reader);
+    final Bits liveDocs = MultiFields.getLiveDocs(reader);

     // we need a minimum of 9 bytes, but round up to 12 since the space would
     // be wasted with most allocators anyway.

@@ -312,7 +312,7 @@ public class DocTermOrds {
       final int df = te.docFreq();
       if (df <= maxTermDocFreq) {

-        docsEnum = te.docs(delDocs, docsEnum);
+        docsEnum = te.docs(liveDocs, docsEnum);

         final DocsEnum.BulkReadResult bulkResult = docsEnum.getBulkResult();

@@ -653,13 +653,13 @@ public class DocTermOrds {
     }

     @Override
-    public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
-      return termsEnum.docs(skipDocs, reuse);
+    public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
+      return termsEnum.docs(liveDocs, reuse);
     }

     @Override
-    public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
-      return termsEnum.docsAndPositions(skipDocs, reuse);
+    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
+      return termsEnum.docsAndPositions(liveDocs, reuse);
     }

     @Override
@@ -111,13 +111,13 @@ public class DocumentsWriterPerThread {
   static class FlushedSegment {
     final SegmentInfo segmentInfo;
     final BufferedDeletes segmentDeletes;
-    final BitVector deletedDocuments;
+    final BitVector liveDocs;

     private FlushedSegment(SegmentInfo segmentInfo,
-        BufferedDeletes segmentDeletes, BitVector deletedDocuments) {
+        BufferedDeletes segmentDeletes, BitVector liveDocs) {
       this.segmentInfo = segmentInfo;
       this.segmentDeletes = segmentDeletes;
-      this.deletedDocuments = deletedDocuments;
+      this.liveDocs = liveDocs;
     }
   }

@@ -434,9 +434,10 @@ public class DocumentsWriterPerThread {
     // happens when an exception is hit processing that
     // doc, eg if analyzer has some problem w/ the text):
     if (pendingDeletes.docIDs.size() > 0) {
-      flushState.deletedDocs = new BitVector(numDocsInRAM);
+      flushState.liveDocs = new BitVector(numDocsInRAM);
+      flushState.liveDocs.invertAll();
       for(int delDocID : pendingDeletes.docIDs) {
-        flushState.deletedDocs.set(delDocID);
+        flushState.liveDocs.clear(delDocID);
       }
       pendingDeletes.bytesUsed.addAndGet(-pendingDeletes.docIDs.size() * BufferedDeletes.BYTES_PER_DEL_DOCID);
       pendingDeletes.docIDs.clear();

@@ -460,7 +461,7 @@ public class DocumentsWriterPerThread {
     pendingDeletes.terms.clear();
     final SegmentInfo newSegment = new SegmentInfo(segment, flushState.numDocs, directory, false, flushState.segmentCodecs, fieldInfos.asReadOnly());
     if (infoStream != null) {
-      message("new segment has " + (flushState.deletedDocs == null ? 0 : flushState.deletedDocs.count()) + " deleted docs");
+      message("new segment has " + (flushState.liveDocs == null ? 0 : (flushState.numDocs - flushState.liveDocs.count())) + " deleted docs");
       message("new segment has " + (newSegment.getHasVectors() ? "vectors" : "no vectors"));
       message("flushedFiles=" + newSegment.files());
       message("flushed codecs=" + newSegment.getSegmentCodecs());

@@ -489,7 +490,7 @@ public class DocumentsWriterPerThread {
       doAfterFlush();
       success = true;

-      return new FlushedSegment(newSegment, segmentDeletes, flushState.deletedDocs);
+      return new FlushedSegment(newSegment, segmentDeletes, flushState.liveDocs);
     } finally {
       if (!success) {
         if (segment != null) {
@ -87,13 +87,13 @@ public class FilterIndexReader extends IndexReader {
     }

     @Override
-    public DocsEnum docs(Bits skipDocs, BytesRef text, DocsEnum reuse) throws IOException {
-      return in.docs(skipDocs, text, reuse);
+    public DocsEnum docs(Bits liveDocs, BytesRef text, DocsEnum reuse) throws IOException {
+      return in.docs(liveDocs, text, reuse);
     }

     @Override
-    public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, BytesRef text, DocsAndPositionsEnum reuse) throws IOException {
-      return in.docsAndPositions(skipDocs, text, reuse);
+    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, BytesRef text, DocsAndPositionsEnum reuse) throws IOException {
+      return in.docsAndPositions(liveDocs, text, reuse);
     }

     @Override
@ -172,13 +172,13 @@ public class FilterIndexReader extends IndexReader {
     }

     @Override
-    public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
-      return in.docs(skipDocs, reuse);
+    public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
+      return in.docs(liveDocs, reuse);
     }

     @Override
-    public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
-      return in.docsAndPositions(skipDocs, reuse);
+    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
+      return in.docsAndPositions(liveDocs, reuse);
     }

     @Override
@ -301,8 +301,8 @@ public class FilterIndexReader extends IndexReader {
   }

   @Override
-  public Bits getDeletedDocs() {
-    return in.getDeletedDocs();
+  public Bits getLiveDocs() {
+    return in.getLiveDocs();
   }

   @Override
@ -339,10 +339,11 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
           // Mark it deleted.  TODO: we could also skip
           // writing its postings; this would be
           // deterministic (just for this Term's docs).
-          if (state.deletedDocs == null) {
-            state.deletedDocs = new BitVector(state.numDocs);
+          if (state.liveDocs == null) {
+            state.liveDocs = new BitVector(state.numDocs);
+            state.liveDocs.invertAll();
           }
-          state.deletedDocs.set(docID);
+          state.liveDocs.clear(docID);
         }

         // Carefully copy over the prox + payload info,
@ -962,7 +962,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
    * requested document is deleted, and therefore asking for a deleted document
    * may yield unspecified results. Usually this is not required, however you
    * can test if the doc is deleted by checking the {@link
-   * Bits} returned from {@link MultiFields#getDeletedDocs}.
+   * Bits} returned from {@link MultiFields#getLiveDocs}.
    *
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
@ -987,7 +987,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
    * requested document is deleted, and therefore asking for a deleted document
    * may yield unspecified results. Usually this is not required, however you
    * can test if the doc is deleted by checking the {@link
-   * Bits} returned from {@link MultiFields#getDeletedDocs}.
+   * Bits} returned from {@link MultiFields#getLiveDocs}.
    *
    * @param n Get the document at the <code>n</code><sup>th</sup> position
    * @param fieldSelector The {@link FieldSelector} to use to determine what
@ -1136,7 +1136,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
   /** Returns {@link DocsEnum} for the specified field &
    *  term.  This may return null, if either the field or
    *  term does not exist. */
-  public DocsEnum termDocsEnum(Bits skipDocs, String field, BytesRef term) throws IOException {
+  public DocsEnum termDocsEnum(Bits liveDocs, String field, BytesRef term) throws IOException {
     assert field != null;
     assert term != null;
     final Fields fields = fields();
@ -1145,7 +1145,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
     }
     final Terms terms = fields.terms(field);
     if (terms != null) {
-      return terms.docs(skipDocs, term, null);
+      return terms.docs(liveDocs, term, null);
     } else {
       return null;
     }
@ -1155,7 +1155,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
    * field & term.  This may return null, if either the
    * field or term does not exist, or, positions were not
    * stored for this term. */
-  public DocsAndPositionsEnum termPositionsEnum(Bits skipDocs, String field, BytesRef term) throws IOException {
+  public DocsAndPositionsEnum termPositionsEnum(Bits liveDocs, String field, BytesRef term) throws IOException {
     assert field != null;
     assert term != null;
     final Fields fields = fields();
@ -1164,7 +1164,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
     }
     final Terms terms = fields.terms(field);
     if (terms != null) {
-      return terms.docsAndPositions(skipDocs, term, null);
+      return terms.docsAndPositions(liveDocs, term, null);
     } else {
       return null;
     }
@ -1175,7 +1175,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
    * {@link TermState}. This may return null, if either the field or the term
    * does not exists or the {@link TermState} is invalid for the underlying
    * implementation.*/
-  public DocsEnum termDocsEnum(Bits skipDocs, String field, BytesRef term, TermState state) throws IOException {
+  public DocsEnum termDocsEnum(Bits liveDocs, String field, BytesRef term, TermState state) throws IOException {
     assert state != null;
     assert field != null;
     final Fields fields = fields();
@ -1184,7 +1184,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
     }
     final Terms terms = fields.terms(field);
     if (terms != null) {
-      return terms.docs(skipDocs, term, state, null);
+      return terms.docs(liveDocs, term, state, null);
     } else {
       return null;
     }
@ -1195,7 +1195,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
    * {@link TermState}. This may return null, if either the field or the term
    * does not exists, the {@link TermState} is invalid for the underlying
    * implementation, or positions were not stored for this term.*/
-  public DocsAndPositionsEnum termPositionsEnum(Bits skipDocs, String field, BytesRef term, TermState state) throws IOException {
+  public DocsAndPositionsEnum termPositionsEnum(Bits liveDocs, String field, BytesRef term, TermState state) throws IOException {
     assert state != null;
     assert field != null;
     final Fields fields = fields();
@ -1204,7 +1204,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
     }
     final Terms terms = fields.terms(field);
     if (terms != null) {
-      return terms.docsAndPositions(skipDocs, term, state, null);
+      return terms.docsAndPositions(liveDocs, term, state, null);
     } else {
       return null;
     }
@ -1260,7 +1260,7 @@ public abstract class IndexReader implements Cloneable,Closeable {
   public int deleteDocuments(Term term) throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
     ensureOpen();
     DocsEnum docs = MultiFields.getTermDocsEnum(this,
-                                                MultiFields.getDeletedDocs(this),
+                                                MultiFields.getLiveDocs(this),
                                                 term.field(),
                                                 term.bytes());
     if (docs == null) return 0;
@ -1385,15 +1385,17 @@ public abstract class IndexReader implements Cloneable,Closeable {
    */
   public abstract Collection<String> getFieldNames(FieldOption fldOption);

-  /** Returns the {@link Bits} representing deleted docs.  A
-   *  set bit indicates the doc ID has been deleted.  This
-   *  method should return null when there are no deleted
-   *  docs.
+  /** Returns the {@link Bits} representing live (not
+   *  deleted) docs.  A set bit indicates the doc ID has not
+   *  been deleted.  If this method returns null it means
+   *  there are no deleted documents (all documents are
+   *  live).
    *
-   *  The returned instance has been safely published for use by
-   *  multiple threads without additional synchronization.
+   *  The returned instance has been safely published for
+   *  use by multiple threads without additional
+   *  synchronization.
    * @lucene.experimental */
-  public abstract Bits getDeletedDocs();
+  public abstract Bits getLiveDocs();

   /**
    * Expert: return the IndexCommit that this reader has
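The reworked getLiveDocs() javadoc above defines the reader-side contract: null means no deletions, otherwise the set bits are the live documents. A hedged usage sketch against the API names as renamed in this patch (IndexReader, MultiFields, Bits); the processing body is illustrative:

import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.util.Bits;

class LiveDocsScan {
  // Visit every live document of a (possibly composite) reader.
  static void visitLive(IndexReader reader) throws IOException {
    final Bits liveDocs = MultiFields.getLiveDocs(reader);
    for (int docID = 0; docID < reader.maxDoc(); docID++) {
      if (liveDocs == null || liveDocs.get(docID)) { // null => no deletions
        Document doc = reader.document(docID);
        // ... process doc ...
      }
    }
  }
}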
@ -2214,8 +2214,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {

         // Must write deleted docs after the CFS so we don't
         // slurp the del file into CFS:
-        if (flushedSegment.deletedDocuments != null) {
-          final int delCount = flushedSegment.deletedDocuments.count();
+        if (flushedSegment.liveDocs != null) {
+          final int delCount = flushedSegment.segmentInfo.docCount - flushedSegment.liveDocs.count();
           assert delCount > 0;
           newSegment.setDelCount(delCount);
           newSegment.advanceDelGen();
@ -2230,7 +2230,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
           // shortly-to-be-opened SegmentReader and let it
           // carry the changes; there's no reason to use
           // filesystem as intermediary here.
-          flushedSegment.deletedDocuments.write(directory, delFileName);
+          flushedSegment.liveDocs.write(directory, delFileName);
           success2 = true;
         } finally {
           if (!success2) {
@ -2931,9 +2931,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
         // Reader was skipped because it was 100% deletions
         continue;
       }
-      final Bits prevDelDocs = previousReader.getDeletedDocs();
+      final Bits prevLiveDocs = previousReader.getLiveDocs();
       final SegmentReader currentReader = merge.readers.get(i);
-      final Bits currentDelDocs = currentReader.getDeletedDocs();
+      final Bits currentLiveDocs = currentReader.getLiveDocs();
       if (previousReader.hasDeletions()) {

         // There were deletes on this segment when the merge
@ -2948,10 +2948,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
         // committed since we started the merge, so we
         // must merge them:
         for(int j=0;j<docCount;j++) {
-          if (prevDelDocs.get(j))
-            assert currentDelDocs.get(j);
+          if (!prevLiveDocs.get(j))
+            assert !currentLiveDocs.get(j);
           else {
-            if (currentDelDocs.get(j)) {
+            if (!currentLiveDocs.get(j)) {
               mergedReader.doDelete(docUpto);
               delCount++;
             }
@ -2965,7 +2965,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
         // This segment had no deletes before but now it
         // does:
         for(int j=0; j<docCount; j++) {
-          if (currentDelDocs.get(j)) {
+          if (!currentLiveDocs.get(j)) {
             mergedReader.doDelete(docUpto);
             delCount++;
           }
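The commitMergedDeletes loops above preserve two invariants under the new polarity: a doc that was dead before the merge started must still be dead afterwards, and docs deleted while the merge ran are re-applied against the merged segment's compacted doc IDs. A standalone restatement with java.util.BitSet standing in for Bits (the method and parameter names here are illustrative):

import java.util.BitSet;

class MergedDeletesSketch {
  // Carry deletions that arrived while a merge was running.  BitSet stands
  // in for Bits; mergedDeletes plays the role of the mergedReader.doDelete
  // callback.  Returns the number of newly applied deletes.
  static int carryDeletes(BitSet prevLive, BitSet currentLive, int docCount,
                          BitSet mergedDeletes /* indexed by docUpto */) {
    int delCount = 0;
    int docUpto = 0;
    for (int j = 0; j < docCount; j++) {
      if (!prevLive.get(j)) {
        assert !currentLive.get(j); // deletes never come back to life
      } else {
        if (!currentLive.get(j)) {  // newly deleted during the merge
          mergedDeletes.set(docUpto);
          delCount++;
        }
        docUpto++;                  // only previously-live docs were merged
      }
    }
    return delCount;
  }
}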
@ -17,7 +17,6 @@ package org.apache.lucene.index;
  * limitations under the License.
  */

-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ReaderUtil;
 import org.apache.lucene.util.BytesRef;

@ -35,7 +34,6 @@ public final class MultiDocsAndPositionsEnum extends DocsAndPositionsEnum {
   int upto;
   DocsAndPositionsEnum current;
   int currentBase;
-  Bits skipDocs;
   int doc = -1;

   MultiDocsAndPositionsEnum reset(final EnumWithSlice[] subs, final int numSubs) throws IOException {
@ -17,7 +17,6 @@ package org.apache.lucene.index;
  * limitations under the License.
  */

-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ReaderUtil;
 import java.io.IOException;

@ -33,7 +32,6 @@ public final class MultiDocsEnum extends DocsEnum {
   int upto;
   DocsEnum current;
   int currentBase;
-  Bits skipDocs;
   int doc = -1;

   MultiDocsEnum reset(final EnumWithSlice[] subs, final int numSubs) throws IOException {
@ -100,19 +100,19 @@ public final class MultiFields extends Fields {
     }
   }

-  public static Bits getDeletedDocs(IndexReader r) {
+  public static Bits getLiveDocs(IndexReader r) {
     Bits result;
     if (r.hasDeletions()) {

-      final List<Bits> delDocs = new ArrayList<Bits>();
+      final List<Bits> liveDocs = new ArrayList<Bits>();
       final List<Integer> starts = new ArrayList<Integer>();

       try {
         final int maxDoc = new ReaderUtil.Gather(r) {
             @Override
             protected void add(int base, IndexReader r) throws IOException {
-              // record all delDocs, even if they are null
-              delDocs.add(r.getDeletedDocs());
+              // record all liveDocs, even if they are null
+              liveDocs.add(r.getLiveDocs());
               starts.add(base);
             }
           }.run();
@ -122,12 +122,12 @@ public final class MultiFields extends Fields {
         throw new RuntimeException(ioe);
       }

-      assert delDocs.size() > 0;
-      if (delDocs.size() == 1) {
+      assert liveDocs.size() > 0;
+      if (liveDocs.size() == 1) {
         // Only one actual sub reader -- optimize this case
-        result = delDocs.get(0);
+        result = liveDocs.get(0);
       } else {
-        result = new MultiBits(delDocs, starts);
+        result = new MultiBits(liveDocs, starts, true);
       }

     } else {
@ -150,12 +150,12 @@ public final class MultiFields extends Fields {
   /** Returns {@link DocsEnum} for the specified field &
    *  term.  This may return null if the term does not
    *  exist. */
-  public static DocsEnum getTermDocsEnum(IndexReader r, Bits skipDocs, String field, BytesRef term) throws IOException {
+  public static DocsEnum getTermDocsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term) throws IOException {
     assert field != null;
     assert term != null;
     final Terms terms = getTerms(r, field);
     if (terms != null) {
-      return terms.docs(skipDocs, term, null);
+      return terms.docs(liveDocs, term, null);
     } else {
       return null;
     }
@ -164,12 +164,12 @@ public final class MultiFields extends Fields {
   /** Returns {@link DocsAndPositionsEnum} for the specified
    *  field & term.  This may return null if the term does
    *  not exist or positions were not indexed. */
-  public static DocsAndPositionsEnum getTermPositionsEnum(IndexReader r, Bits skipDocs, String field, BytesRef term) throws IOException {
+  public static DocsAndPositionsEnum getTermPositionsEnum(IndexReader r, Bits liveDocs, String field, BytesRef term) throws IOException {
     assert field != null;
     assert term != null;
     final Terms terms = getTerms(r, field);
     if (terms != null) {
-      return terms.docsAndPositions(skipDocs, term, null);
+      return terms.docsAndPositions(liveDocs, term, null);
     } else {
       return null;
     }
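MultiFields.getLiveDocs now stitches per-segment liveDocs into one composite view (MultiBits, whose new third argument makes a null sub read as all-live). The sketch below illustrates the idea only; it is not the real MultiBits (which also special-cases congruent sub-slices), and it assumes the Bits interface of this era exposes get(int) and length():

import org.apache.lucene.util.Bits;

// Expose per-segment liveDocs as one Bits over the composite doc-ID space.
// starts[i] is the doc base of sub i; starts[subs.length] == maxDoc.
// A null sub means "no deletions" for that segment, i.e. all live.
class ConcatLiveDocs implements Bits {
  private final Bits[] subs;
  private final int[] starts;

  ConcatLiveDocs(Bits[] subs, int[] starts) {
    this.subs = subs;
    this.starts = starts;
  }

  @Override
  public boolean get(int doc) {
    // linear scan for clarity; a real implementation would binary-search
    int i = 0;
    while (i + 1 < subs.length && doc >= starts[i + 1]) {
      i++;
    }
    final Bits sub = subs[i];
    return sub == null || sub.get(doc - starts[i]); // null sub => live
  }

  @Override
  public int length() {
    return starts[subs.length];
  }
}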
@ -141,8 +141,8 @@ public class MultiReader extends IndexReader implements Cloneable {
   }

   @Override
-  public Bits getDeletedDocs() {
-    throw new UnsupportedOperationException("please use MultiFields.getDeletedDocs, or wrap your IndexReader with SlowMultiReaderWrapper, if you really need a top level Bits deletedDocs");
+  public Bits getLiveDocs() {
+    throw new UnsupportedOperationException("please use MultiFields.getLiveDocs, or wrap your IndexReader with SlowMultiReaderWrapper, if you really need a top level Bits liveDocs");
   }

   /**
@ -346,7 +346,7 @@ public final class MultiTermsEnum extends TermsEnum {
   }

   @Override
-  public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
+  public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
     final MultiDocsEnum docsEnum;
     if (reuse != null) {
       docsEnum = (MultiDocsEnum) reuse;
@ -354,11 +354,11 @@ public final class MultiTermsEnum extends TermsEnum {
       docsEnum = new MultiDocsEnum();
     }

-    final MultiBits multiSkipDocs;
-    if (skipDocs instanceof MultiBits) {
-      multiSkipDocs = (MultiBits) skipDocs;
+    final MultiBits multiLiveDocs;
+    if (liveDocs instanceof MultiBits) {
+      multiLiveDocs = (MultiBits) liveDocs;
     } else {
-      multiSkipDocs = null;
+      multiLiveDocs = null;
     }

     int upto = 0;
@ -369,22 +369,22 @@ public final class MultiTermsEnum extends TermsEnum {

       final Bits b;

-      if (multiSkipDocs != null) {
+      if (multiLiveDocs != null) {
         // optimize for common case: requested skip docs is a
         // congruent sub-slice of MultiBits: in this case, we
-        // just pull the skipDocs from the sub reader, rather
+        // just pull the liveDocs from the sub reader, rather
         // than making the inefficient
         // Slice(Multi(sub-readers)):
-        final MultiBits.SubResult sub = multiSkipDocs.getMatchingSub(entry.subSlice);
+        final MultiBits.SubResult sub = multiLiveDocs.getMatchingSub(entry.subSlice);
         if (sub.matches) {
           b = sub.result;
         } else {
           // custom case: requested skip docs is foreign:
           // must slice it on every access
-          b = new BitsSlice(skipDocs, entry.subSlice);
+          b = new BitsSlice(liveDocs, entry.subSlice);
         }
-      } else if (skipDocs != null) {
-        b = new BitsSlice(skipDocs, entry.subSlice);
+      } else if (liveDocs != null) {
+        b = new BitsSlice(liveDocs, entry.subSlice);
       } else {
         // no deletions
         b = null;
@ -407,7 +407,7 @@ public final class MultiTermsEnum extends TermsEnum {
   }

   @Override
-  public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
+  public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
     final MultiDocsAndPositionsEnum docsAndPositionsEnum;
     if (reuse != null) {
       docsAndPositionsEnum = (MultiDocsAndPositionsEnum) reuse;
@ -415,11 +415,11 @@ public final class MultiTermsEnum extends TermsEnum {
       docsAndPositionsEnum = new MultiDocsAndPositionsEnum();
     }

-    final MultiBits multiSkipDocs;
-    if (skipDocs instanceof MultiBits) {
-      multiSkipDocs = (MultiBits) skipDocs;
+    final MultiBits multiLiveDocs;
+    if (liveDocs instanceof MultiBits) {
+      multiLiveDocs = (MultiBits) liveDocs;
     } else {
-      multiSkipDocs = null;
+      multiLiveDocs = null;
     }

     int upto = 0;
@ -430,23 +430,23 @@ public final class MultiTermsEnum extends TermsEnum {

       final Bits b;

-      if (multiSkipDocs != null) {
+      if (multiLiveDocs != null) {
         // Optimize for common case: requested skip docs is a
         // congruent sub-slice of MultiBits: in this case, we
-        // just pull the skipDocs from the sub reader, rather
+        // just pull the liveDocs from the sub reader, rather
        // than making the inefficient
         // Slice(Multi(sub-readers)):
-        final MultiBits.SubResult sub = multiSkipDocs.getMatchingSub(top[i].subSlice);
+        final MultiBits.SubResult sub = multiLiveDocs.getMatchingSub(top[i].subSlice);
         if (sub.matches) {
           b = sub.result;
         } else {
           // custom case: requested skip docs is foreign:
           // must slice it on every access (very
           // inefficient)
-          b = new BitsSlice(skipDocs, top[i].subSlice);
+          b = new BitsSlice(liveDocs, top[i].subSlice);
         }
-      } else if (skipDocs != null) {
-        b = new BitsSlice(skipDocs, top[i].subSlice);
+      } else if (liveDocs != null) {
+        b = new BitsSlice(liveDocs, top[i].subSlice);
       } else {
         // no deletions
         b = null;
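When the incoming liveDocs is not a MultiBits whose sub-slice lines up with this segment, the code above falls back to BitsSlice, which re-bases doc IDs on every get() call (the "inefficient" case the comments warn about). A sketch of that fallback under the same Bits-interface assumption; the class and field names are illustrative, not the real BitsSlice:

import org.apache.lucene.util.Bits;

// View a slice of a composite Bits as the liveDocs of one sub-reader.
class LiveDocsSlice implements Bits {
  private final Bits parent;
  private final int start;  // doc base of this sub-reader
  private final int length; // maxDoc of this sub-reader

  LiveDocsSlice(Bits parent, int start, int length) {
    this.parent = parent;
    this.start = start;
    this.length = length;
  }

  @Override
  public boolean get(int doc) {
    assert doc < length;
    return parent.get(doc + start); // re-base on every access
  }

  @Override
  public int length() {
    return length;
  }
}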
@ -206,9 +206,9 @@ public class ParallelReader extends IndexReader {
     }
   }

   @Override
-  public Bits getDeletedDocs() {
-    return MultiFields.getDeletedDocs(readers.get(0));
+  public Bits getLiveDocs() {
+    return MultiFields.getLiveDocs(readers.get(0));
   }

   @Override
@ -282,11 +282,12 @@ final class SegmentMerger {
       throws IOException, MergeAbortedException, CorruptIndexException {
     int docCount = 0;
     final int maxDoc = reader.maxDoc();
-    final Bits delDocs = reader.getDeletedDocs();
+    final Bits liveDocs = reader.getLiveDocs();
+    assert liveDocs != null;
     if (matchingFieldsReader != null) {
       // We can bulk-copy because the fieldInfos are "congruent"
       for (int j = 0; j < maxDoc;) {
-        if (delDocs.get(j)) {
+        if (!liveDocs.get(j)) {
           // skip deleted docs
           ++j;
           continue;
@ -298,7 +299,7 @@ final class SegmentMerger {
           j++;
           numDocs++;
           if (j >= maxDoc) break;
-          if (delDocs.get(j)) {
+          if (!liveDocs.get(j)) {
             j++;
             break;
           }
@ -311,7 +312,7 @@ final class SegmentMerger {
       }
     } else {
       for (int j = 0; j < maxDoc; j++) {
-        if (delDocs.get(j)) {
+        if (!liveDocs.get(j)) {
           // skip deleted docs
           continue;
         }
@ -401,11 +402,11 @@ final class SegmentMerger {
                                       final IndexReader reader)
       throws IOException, MergeAbortedException {
     final int maxDoc = reader.maxDoc();
-    final Bits delDocs = reader.getDeletedDocs();
+    final Bits liveDocs = reader.getLiveDocs();
     if (matchingVectorsReader != null) {
       // We can bulk-copy because the fieldInfos are "congruent"
       for (int docNum = 0; docNum < maxDoc;) {
-        if (delDocs.get(docNum)) {
+        if (!liveDocs.get(docNum)) {
           // skip deleted docs
           ++docNum;
           continue;
@ -417,7 +418,7 @@ final class SegmentMerger {
           docNum++;
           numDocs++;
           if (docNum >= maxDoc) break;
-          if (delDocs.get(docNum)) {
+          if (!liveDocs.get(docNum)) {
             docNum++;
             break;
           }
@ -429,7 +430,7 @@ final class SegmentMerger {
       }
     } else {
       for (int docNum = 0; docNum < maxDoc; docNum++) {
-        if (delDocs.get(docNum)) {
+        if (!liveDocs.get(docNum)) {
           // skip deleted docs
           continue;
         }
@ -499,14 +500,14 @@ final class SegmentMerger {
       if (f != null) {
         slices.add(new ReaderUtil.Slice(docBase, maxDoc, fields.size()));
         fields.add(f);
-        bits.add(r.getDeletedDocs());
+        bits.add(r.getLiveDocs());
         bitsStarts.add(docBase);
       }
       final PerDocValues producer = r.perDocValues();
       if (producer != null) {
         perDocSlices.add(new ReaderUtil.Slice(docBase, maxDoc, fields.size()));
         perDocProducers.add(producer);
-        perDocBits.add(r.getDeletedDocs());
+        perDocBits.add(r.getLiveDocs());
         perDocBitsStarts.add(docBase);
       }
       docBase += maxDoc;
@ -544,13 +545,13 @@ final class SegmentMerger {
       inputDocBase += reader.maxDoc();
       if (mergeState.delCounts[i] != 0) {
         int delCount = 0;
-        final Bits delDocs = reader.getDeletedDocs();
-        assert delDocs != null;
+        final Bits liveDocs = reader.getLiveDocs();
+        assert liveDocs != null;
         final int maxDoc = reader.maxDoc();
         final int[] docMap = mergeState.docMaps[i] = new int[maxDoc];
         int newDocID = 0;
         for(int j=0;j<maxDoc;j++) {
-          if (delDocs.get(j)) {
+          if (!liveDocs.get(j)) {
             docMap[j] = -1;
             delCount++;  // only for assert
           } else {
@ -571,7 +572,7 @@ final class SegmentMerger {
     // MultiBits as our skip docs only to have it broken
     // apart when we step through the docs enums in
     // MultiDocsEnum.
-    mergeState.multiDeletedDocs = new MultiBits(bits, bitsStarts);
+    mergeState.multiLiveDocs = new MultiBits(bits, bitsStarts, true);

     consumer.merge(mergeState,
                    new MultiFields(fields.toArray(Fields.EMPTY_ARRAY),
@ -580,7 +581,7 @@ final class SegmentMerger {
       consumer.close();
     }
     if (!perDocSlices.isEmpty()) {
-      mergeState.multiDeletedDocs = new MultiBits(perDocBits, perDocBitsStarts);
+      mergeState.multiLiveDocs = new MultiBits(perDocBits, perDocBitsStarts, true);
       final PerDocConsumer docsConsumer = codec
           .docsConsumer(new PerDocWriteState(segmentWriteState));
       try {
@ -592,7 +593,6 @@ final class SegmentMerger {
         docsConsumer.close();
       }
     }
-
   }

   private MergeState mergeState;
@ -635,9 +635,9 @@ final class SegmentMerger {
     } else {
       // this segment has deleted docs, so we have to
       // check for every doc if it is deleted or not
-      final Bits delDocs = reader.getDeletedDocs();
+      final Bits liveDocs = reader.getLiveDocs();
       for (int k = 0; k < maxDoc; k++) {
-        if (!delDocs.get(k)) {
+        if (liveDocs.get(k)) {
           output.writeByte(normBuffer[k]);
         }
       }
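The docMap loop above is the standard compaction step of a merge: deleted docs map to -1, live docs receive consecutive new IDs. Restated as a self-contained sketch with java.util.BitSet in place of the reader's liveDocs (an assumption for illustration):

import java.util.BitSet;

class DocMapSketch {
  // Compact doc IDs by skipping non-live docs: deleted docs map to -1,
  // each live doc gets the next sequential new ID in the merged segment.
  static int[] buildDocMap(BitSet liveDocs, int maxDoc) {
    final int[] docMap = new int[maxDoc];
    int newDocID = 0;
    for (int j = 0; j < maxDoc; j++) {
      if (!liveDocs.get(j)) {
        docMap[j] = -1;         // dropped by the merge
      } else {
        docMap[j] = newDocID++; // compacted position in the merged segment
      }
    }
    return docMap;
  }
}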
@ -51,9 +51,9 @@ public class SegmentReader extends IndexReader implements Cloneable {
   CloseableThreadLocal<FieldsReader> fieldsReaderLocal = new FieldsReaderLocal();
   CloseableThreadLocal<TermVectorsReader> termVectorsLocal = new CloseableThreadLocal<TermVectorsReader>();

-  volatile BitVector deletedDocs;
-  AtomicInteger deletedDocsRef = null;
-  private boolean deletedDocsDirty = false;
+  volatile BitVector liveDocs;
+  AtomicInteger liveDocsRef = null;
+  private boolean liveDocsDirty = false;
   private boolean normsDirty = false;

   // TODO: we should move this tracking into SegmentInfo;
@ -116,7 +116,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
       if (doOpenStores) {
         instance.core.openDocStores(si);
       }
-      instance.loadDeletedDocs();
+      instance.loadLiveDocs();
       instance.openNorms(instance.core.cfsDir, readBufferSize);
       success = true;
     } finally {
@ -138,34 +138,37 @@ public class SegmentReader extends IndexReader implements Cloneable {
   }

   @Override
-  public Bits getDeletedDocs() {
-    return deletedDocs;
+  public Bits getLiveDocs() {
+    return liveDocs;
   }

-  private boolean checkDeletedCounts() throws IOException {
-    final int recomputedCount = deletedDocs.getRecomputedCount();
-    assert deletedDocs.count() == recomputedCount : "deleted count=" + deletedDocs.count() + " vs recomputed count=" + recomputedCount;
+  private boolean checkLiveCounts() throws IOException {
+    final int recomputedCount = liveDocs.getRecomputedCount();
+    // First verify BitVector is self consistent:
+    assert liveDocs.count() == recomputedCount : "live count=" + liveDocs.count() + " vs recomputed count=" + recomputedCount;

-    assert si.getDelCount() == recomputedCount :
-      "delete count mismatch: info=" + si.getDelCount() + " vs BitVector=" + recomputedCount;
+    assert si.getDelCount() == si.docCount - recomputedCount :
+      "delete count mismatch: info=" + si.getDelCount() + " vs BitVector=" + (si.docCount-recomputedCount);

     // Verify # deletes does not exceed maxDoc for this
     // segment:
     assert si.getDelCount() <= maxDoc() :
       "delete count mismatch: " + recomputedCount + ") exceeds max doc (" + maxDoc() + ") for segment " + si.name;

     return true;
   }

-  private void loadDeletedDocs() throws IOException {
+  private void loadLiveDocs() throws IOException {
     // NOTE: the bitvector is stored using the regular directory, not cfs
     if (hasDeletions(si)) {
-      deletedDocs = new BitVector(directory(), si.getDelFileName());
-      deletedDocsRef = new AtomicInteger(1);
-      assert checkDeletedCounts();
-      if (deletedDocs.size() != si.docCount) {
-        throw new CorruptIndexException("document count mismatch: deleted docs count " + deletedDocs.size() + " vs segment doc count " + si.docCount + " segment=" + si.name);
+      liveDocs = new BitVector(directory(), si.getDelFileName());
+      if (liveDocs.getVersion() < BitVector.VERSION_DGAPS_CLEARED) {
+        liveDocs.invertAll();
+      }
+      liveDocsRef = new AtomicInteger(1);
+      assert checkLiveCounts();
+      if (liveDocs.size() != si.docCount) {
+        throw new CorruptIndexException("document count mismatch: deleted docs count " + liveDocs.size() + " vs segment doc count " + si.docCount + " segment=" + si.name);
       }
     } else
       assert si.getDelCount() == 0;
@ -256,27 +259,27 @@ public class SegmentReader extends IndexReader implements Cloneable {

     if (!openReadOnly && hasChanges) {
       // My pending changes transfer to the new reader
-      clone.deletedDocsDirty = deletedDocsDirty;
+      clone.liveDocsDirty = liveDocsDirty;
       clone.normsDirty = normsDirty;
       clone.hasChanges = hasChanges;
       hasChanges = false;
     }

     if (doClone) {
-      if (deletedDocs != null) {
-        deletedDocsRef.incrementAndGet();
-        clone.deletedDocs = deletedDocs;
-        clone.deletedDocsRef = deletedDocsRef;
+      if (liveDocs != null) {
+        liveDocsRef.incrementAndGet();
+        clone.liveDocs = liveDocs;
+        clone.liveDocsRef = liveDocsRef;
       }
     } else {
       if (!deletionsUpToDate) {
         // load deleted docs
-        assert clone.deletedDocs == null;
-        clone.loadDeletedDocs();
-      } else if (deletedDocs != null) {
-        deletedDocsRef.incrementAndGet();
-        clone.deletedDocs = deletedDocs;
-        clone.deletedDocsRef = deletedDocsRef;
+        assert clone.liveDocs == null;
+        clone.loadLiveDocs();
+      } else if (liveDocs != null) {
+        liveDocsRef.incrementAndGet();
+        clone.liveDocs = liveDocs;
+        clone.liveDocsRef = liveDocsRef;
       }
     }

@ -326,10 +329,10 @@ public class SegmentReader extends IndexReader implements Cloneable {
   }

   private synchronized void commitChanges(Map<String,String> commitUserData) throws IOException {
-    if (deletedDocsDirty) {               // re-write deleted
+    if (liveDocsDirty) {               // re-write deleted
       si.advanceDelGen();

-      assert deletedDocs.length() == si.docCount;
+      assert liveDocs.length() == si.docCount;

       // We can write directly to the actual name (vs to a
       // .tmp & renaming it) because the file is not live
@ -337,7 +340,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
       final String delFileName = si.getDelFileName();
       boolean success = false;
       try {
-        deletedDocs.write(directory(), delFileName);
+        liveDocs.write(directory(), delFileName);
         success = true;
       } finally {
         if (!success) {
@ -349,10 +352,9 @@ public class SegmentReader extends IndexReader implements Cloneable {
           }
         }
       }
-
       si.setDelCount(si.getDelCount()+pendingDeleteCount);
       pendingDeleteCount = 0;
-      assert deletedDocs.count() == si.getDelCount(): "delete count mismatch during commit: info=" + si.getDelCount() + " vs BitVector=" + deletedDocs.count();
+      assert (maxDoc()-liveDocs.count()) == si.getDelCount(): "delete count mismatch during commit: info=" + si.getDelCount() + " vs BitVector=" + (maxDoc()-liveDocs.count());
     } else {
       assert pendingDeleteCount == 0;
     }
@ -365,7 +367,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
         }
       }
     }
-    deletedDocsDirty = false;
+    liveDocsDirty = false;
     normsDirty = false;
     hasChanges = false;
   }
@ -379,10 +381,10 @@ public class SegmentReader extends IndexReader implements Cloneable {
     termVectorsLocal.close();
     fieldsReaderLocal.close();

-    if (deletedDocs != null) {
-      deletedDocsRef.decrementAndGet();
+    if (liveDocs != null) {
+      liveDocsRef.decrementAndGet();
       // null so if an app hangs on to us we still free most ram
-      deletedDocs = null;
+      liveDocs = null;
     }

     for (final SegmentNorms norm : norms.values()) {
@ -401,7 +403,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
   @Override
   public boolean hasDeletions() {
     // Don't call ensureOpen() here (it could affect performance)
-    return deletedDocs != null;
+    return liveDocs != null;
   }

   static boolean usesCompoundFile(SegmentInfo si) throws IOException {
@ -414,38 +416,39 @@ public class SegmentReader extends IndexReader implements Cloneable {

   @Override
   protected void doDelete(int docNum) {
-    if (deletedDocs == null) {
-      deletedDocs = new BitVector(maxDoc());
-      deletedDocsRef = new AtomicInteger(1);
+    if (liveDocs == null) {
+      liveDocs = new BitVector(maxDoc());
+      liveDocs.setAll();
+      liveDocsRef = new AtomicInteger(1);
     }
     // there is more than 1 SegmentReader with a reference to this
-    // deletedDocs BitVector so decRef the current deletedDocsRef,
-    // clone the BitVector, create a new deletedDocsRef
-    if (deletedDocsRef.get() > 1) {
-      AtomicInteger oldRef = deletedDocsRef;
-      deletedDocs = cloneDeletedDocs(deletedDocs);
-      deletedDocsRef = new AtomicInteger(1);
+    // liveDocs BitVector so decRef the current liveDocsRef,
+    // clone the BitVector, create a new liveDocsRef
+    if (liveDocsRef.get() > 1) {
+      AtomicInteger oldRef = liveDocsRef;
+      liveDocs = cloneDeletedDocs(liveDocs);
+      liveDocsRef = new AtomicInteger(1);
       oldRef.decrementAndGet();
     }
-    deletedDocsDirty = true;
-    if (!deletedDocs.getAndSet(docNum)) {
+    liveDocsDirty = true;
+    if (liveDocs.getAndClear(docNum)) {
       pendingDeleteCount++;
     }
   }

   @Override
   protected void doUndeleteAll() {
-    deletedDocsDirty = false;
-    if (deletedDocs != null) {
-      assert deletedDocsRef != null;
-      deletedDocsRef.decrementAndGet();
-      deletedDocs = null;
-      deletedDocsRef = null;
+    liveDocsDirty = false;
+    if (liveDocs != null) {
+      assert liveDocsRef != null;
+      liveDocsRef.decrementAndGet();
+      liveDocs = null;
+      liveDocsRef = null;
       pendingDeleteCount = 0;
       si.clearDelGen();
       si.setDelCount(0);
     } else {
-      assert deletedDocsRef == null;
+      assert liveDocsRef == null;
       assert pendingDeleteCount == 0;
     }
   }
@ -484,10 +487,11 @@ public class SegmentReader extends IndexReader implements Cloneable {
   @Override
   public int numDocs() {
     // Don't call ensureOpen() here (it could affect performance)
-    int n = maxDoc();
-    if (deletedDocs != null)
-      n -= deletedDocs.count();
-    return n;
+    if (liveDocs != null) {
+      return liveDocs.count();
+    } else {
+      return maxDoc();
+    }
   }

   @Override
@ -790,7 +794,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
   void startCommit() {
     rollbackSegmentInfo = (SegmentInfo) si.clone();
     rollbackHasChanges = hasChanges;
-    rollbackDeletedDocsDirty = deletedDocsDirty;
+    rollbackDeletedDocsDirty = liveDocsDirty;
     rollbackNormsDirty = normsDirty;
     rollbackPendingDeleteCount = pendingDeleteCount;
     for (SegmentNorms norm : norms.values()) {
@ -801,7 +805,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
   void rollbackCommit() {
     si.reset(rollbackSegmentInfo);
     hasChanges = rollbackHasChanges;
-    deletedDocsDirty = rollbackDeletedDocsDirty;
+    liveDocsDirty = rollbackDeletedDocsDirty;
     normsDirty = rollbackNormsDirty;
     pendingDeleteCount = rollbackPendingDeleteCount;
     for (SegmentNorms norm : norms.values()) {
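SegmentReader's doDelete now combines three ideas: lazy all-live initialization (setAll on a fresh BitVector), copy-on-write when the vector is shared with another reader via the ref count, and getAndClear so only first-time deletes bump pendingDeleteCount; numDocs is then derived from the live count. A compact sketch of that discipline with java.util.BitSet and AtomicInteger (the class and field names are illustrative; the real code clones Lucene's BitVector):

import java.util.BitSet;
import java.util.concurrent.atomic.AtomicInteger;

class CopyOnWriteDeleteSketch {
  BitSet liveDocs;            // null until the first delete
  AtomicInteger liveDocsRef;  // readers currently sharing liveDocs
  int pendingDeleteCount;
  final int maxDoc;

  CopyOnWriteDeleteSketch(int maxDoc) {
    this.maxDoc = maxDoc;
  }

  void doDelete(int docNum) {
    if (liveDocs == null) {
      liveDocs = new BitSet(maxDoc);
      liveDocs.set(0, maxDoc);              // start all-live (like setAll())
      liveDocsRef = new AtomicInteger(1);
    }
    if (liveDocsRef.get() > 1) {
      AtomicInteger oldRef = liveDocsRef;
      liveDocs = (BitSet) liveDocs.clone(); // private copy; sharer keeps its snapshot
      liveDocsRef = new AtomicInteger(1);
      oldRef.decrementAndGet();
    }
    if (liveDocs.get(docNum)) {             // getAndClear(docNum) in the patch
      liveDocs.clear(docNum);
      pendingDeleteCount++;                 // count only first-time deletes
    }
  }

  int numDocs() {
    return liveDocs != null ? liveDocs.cardinality() : maxDoc;
  }
}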
@ -40,7 +40,7 @@ public class SegmentWriteState {
   public final BufferedDeletes segDeletes;

   // Lazily created:
-  public BitVector deletedDocs;
+  public BitVector liveDocs;

   final SegmentCodecs segmentCodecs;
   public final int codecId;
@ -76,10 +76,9 @@ public final class SlowMultiReaderWrapper extends FilterIndexReader {
   }

   @Override
-  public Bits getDeletedDocs() {
-    return MultiFields.getDeletedDocs(in);
+  public Bits getLiveDocs() {
+    return MultiFields.getLiveDocs(in);
   }

-
   @Override
   public IndexReader[] getSequentialSubReaders() {
@ -74,10 +74,10 @@ public abstract class Terms {

   /** Get {@link DocsEnum} for the specified term.  This
    *  method may return null if the term does not exist. */
-  public DocsEnum docs(Bits skipDocs, BytesRef text, DocsEnum reuse) throws IOException {
+  public DocsEnum docs(Bits liveDocs, BytesRef text, DocsEnum reuse) throws IOException {
     final TermsEnum termsEnum = getThreadTermsEnum();
     if (termsEnum.seekExact(text, true)) {
-      return termsEnum.docs(skipDocs, reuse);
+      return termsEnum.docs(liveDocs, reuse);
     } else {
       return null;
     }
@ -86,10 +86,10 @@ public abstract class Terms {
   /** Get {@link DocsEnum} for the specified term.  This
    *  method will may return null if the term does not
    *  exists, or positions were not indexed. */
-  public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, BytesRef text, DocsAndPositionsEnum reuse) throws IOException {
+  public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, BytesRef text, DocsAndPositionsEnum reuse) throws IOException {
     final TermsEnum termsEnum = getThreadTermsEnum();
     if (termsEnum.seekExact(text, true)) {
-      return termsEnum.docsAndPositions(skipDocs, reuse);
+      return termsEnum.docsAndPositions(liveDocs, reuse);
     } else {
       return null;
     }
@ -101,10 +101,10 @@ public abstract class Terms {
    *
    * @see TermsEnum#termState()
    * @see TermsEnum#seekExact(BytesRef, TermState) */
-  public DocsEnum docs(Bits skipDocs, BytesRef term, TermState termState, DocsEnum reuse) throws IOException {
+  public DocsEnum docs(Bits liveDocs, BytesRef term, TermState termState, DocsEnum reuse) throws IOException {
     final TermsEnum termsEnum = getThreadTermsEnum();
     termsEnum.seekExact(term, termState);
-    return termsEnum.docs(skipDocs, reuse);
+    return termsEnum.docs(liveDocs, reuse);
   }

   /**
@ -114,10 +114,10 @@ public abstract class Terms {
    *
    * @see TermsEnum#termState()
    * @see TermsEnum#seekExact(BytesRef, TermState) */
-  public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, BytesRef term, TermState termState, DocsAndPositionsEnum reuse) throws IOException {
+  public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, BytesRef term, TermState termState, DocsAndPositionsEnum reuse) throws IOException {
     final TermsEnum termsEnum = getThreadTermsEnum();
     termsEnum.seekExact(term, termState);
-    return termsEnum.docsAndPositions(skipDocs, reuse);
+    return termsEnum.docsAndPositions(liveDocs, reuse);
   }

   public long getUniqueTermCount() throws IOException {
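With the Terms methods renamed above, a caller passes liveDocs so the returned enum silently skips deleted documents, or null when nothing is deleted. A hedged usage sketch against the trunk API as of this patch (the counting body is illustrative):

import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;

class PostingsScan {
  // Count the live documents containing a term.
  static void countPostings(IndexReader r, String field, BytesRef term) throws IOException {
    final Bits liveDocs = MultiFields.getLiveDocs(r); // null => no deletions
    final DocsEnum docs = MultiFields.getTermDocsEnum(r, liveDocs, field, term);
    if (docs == null) {
      return; // field or term does not exist
    }
    int count = 0;
    while (docs.nextDoc() != DocsEnum.NO_MORE_DOCS) {
      count++; // only live documents are returned
    }
    System.out.println(count + " live docs contain " + term.utf8ToString());
  }
}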
@ -147,16 +147,16 @@ public abstract class TermsEnum {
    *  call this when the enum is unpositioned.  This method
    *  will not return null.
    *
-   * @param skipDocs set bits are documents that should not
+   * @param liveDocs set bits are documents that should not
    * be returned
    * @param reuse pass a prior DocsEnum for possible reuse */
-  public abstract DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException;
+  public abstract DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException;

   /** Get {@link DocsAndPositionsEnum} for the current term.
    *  Do not call this when the enum is unpositioned.
    *  This method will only return null if positions were
    *  not indexed into the postings by this codec. */
-  public abstract DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException;
+  public abstract DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException;

   /**
    * Expert: Returns the TermsEnums internal state to position the TermsEnum
@ -224,12 +224,12 @@ public abstract class TermsEnum {
     }

     @Override
-    public DocsEnum docs(Bits bits, DocsEnum reuse) {
+    public DocsEnum docs(Bits liveDocs, DocsEnum reuse) {
       throw new IllegalStateException("this method should never be called");
     }

     @Override
-    public DocsAndPositionsEnum docsAndPositions(Bits bits, DocsAndPositionsEnum reuse) {
+    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) {
       throw new IllegalStateException("this method should never be called");
     }

@ -688,23 +688,23 @@ public class BlockTermsReader extends FieldsProducer {
       }

       @Override
-      public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
+      public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
         //System.out.println("BTR.docs this=" + this);
         decodeMetaData();
         //System.out.println("  state.docFreq=" + state.docFreq);
-        final DocsEnum docsEnum = postingsReader.docs(fieldInfo, state, skipDocs, reuse);
+        final DocsEnum docsEnum = postingsReader.docs(fieldInfo, state, liveDocs, reuse);
         assert docsEnum != null;
         return docsEnum;
       }

       @Override
-      public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
+      public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
         //System.out.println("BTR.d&p this=" + this);
         decodeMetaData();
         if (fieldInfo.omitTermFreqAndPositions) {
           return null;
         } else {
-          DocsAndPositionsEnum dpe = postingsReader.docsAndPositions(fieldInfo, state, skipDocs, reuse);
+          DocsAndPositionsEnum dpe = postingsReader.docsAndPositions(fieldInfo, state, liveDocs, reuse);
           //System.out.println("  return d&pe=" + dpe);
           return dpe;
         }
@ -114,8 +114,8 @@ public abstract class DocValuesConsumer {
|
||||||
final IndexDocValues r = reader.docValues(mergeState.fieldInfo.name);
final IndexDocValues r = reader.docValues(mergeState.fieldInfo.name);
if (r != null) {
if (r != null) {
merged = true;
merged = true;
merge(new Writer.MergeState(r, docBase, reader.maxDoc(), reader
merge(new Writer.MergeState(r, docBase, reader.maxDoc(),
.getDeletedDocs()));
reader.getLiveDocs()));
}
}
docBase += reader.numDocs();
docBase += reader.numDocs();
}
}

@ -152,15 +152,15 @@ public abstract class DocValuesConsumer {
public final int docBase;
public final int docBase;
/** the number of documents in this MergeState */
/** the number of documents in this MergeState */
public final int docCount;
public final int docCount;
/** the deleted bits for this MergeState */
/** the live (not deleted) bits for this MergeState */
public final Bits bits;
public final Bits liveDocs;

public MergeState(IndexDocValues reader, int docBase, int docCount, Bits bits) {
public MergeState(IndexDocValues reader, int docBase, int docCount, Bits liveDocs) {
assert reader != null;
assert reader != null;
this.reader = reader;
this.reader = reader;
this.docBase = docBase;
this.docBase = docBase;
this.docCount = docCount;
this.docCount = docCount;
this.bits = bits;
this.liveDocs = liveDocs;
}
}
}
}
}
}

@ -39,7 +39,7 @@ public class MergeState {
public int[] delCounts; // Deletion count per reader
public int[] delCounts; // Deletion count per reader
public int[] docBase; // New docID base per reader
public int[] docBase; // New docID base per reader
public int mergedDocCount; // Total # merged docs
public int mergedDocCount; // Total # merged docs
public Bits multiDeletedDocs;
public Bits multiLiveDocs;
public CheckAbort checkAbort;
public CheckAbort checkAbort;

// Updated per field;
// Updated per field;

@ -49,11 +49,11 @@ public abstract class PostingsReaderBase implements Closeable {

/** Must fully consume state, since after this call that
/** Must fully consume state, since after this call that
* TermState may be reused. */
* TermState may be reused. */
public abstract DocsEnum docs(FieldInfo fieldInfo, BlockTermState state, Bits skipDocs, DocsEnum reuse) throws IOException;
public abstract DocsEnum docs(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs, DocsEnum reuse) throws IOException;

/** Must fully consume state, since after this call that
/** Must fully consume state, since after this call that
* TermState may be reused. */
* TermState may be reused. */
public abstract DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState state, Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException;
public abstract DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException;

public abstract void close() throws IOException;
public abstract void close() throws IOException;

@ -67,7 +67,7 @@ public abstract class TermsConsumer {
MultiDocsEnum docsEnumIn = null;
MultiDocsEnum docsEnumIn = null;

while((term = termsEnum.next()) != null) {
while((term = termsEnum.next()) != null) {
docsEnumIn = (MultiDocsEnum) termsEnum.docs(mergeState.multiDeletedDocs, docsEnumIn);
docsEnumIn = (MultiDocsEnum) termsEnum.docs(mergeState.multiLiveDocs, docsEnumIn);
if (docsEnumIn != null) {
if (docsEnumIn != null) {
docsEnum.reset(docsEnumIn);
docsEnum.reset(docsEnumIn);
final PostingsConsumer postingsConsumer = startTerm(term);
final PostingsConsumer postingsConsumer = startTerm(term);

@ -89,7 +89,7 @@ public abstract class TermsConsumer {
postingsEnum.setMergeState(mergeState);
postingsEnum.setMergeState(mergeState);
MultiDocsAndPositionsEnum postingsEnumIn = null;
MultiDocsAndPositionsEnum postingsEnumIn = null;
while((term = termsEnum.next()) != null) {
while((term = termsEnum.next()) != null) {
postingsEnumIn = (MultiDocsAndPositionsEnum) termsEnum.docsAndPositions(mergeState.multiDeletedDocs, postingsEnumIn);
postingsEnumIn = (MultiDocsAndPositionsEnum) termsEnum.docsAndPositions(mergeState.multiLiveDocs, postingsEnumIn);
if (postingsEnumIn != null) {
if (postingsEnumIn != null) {
postingsEnum.reset(postingsEnumIn);
postingsEnum.reset(postingsEnumIn);
// set PayloadProcessor
// set PayloadProcessor

@ -269,7 +269,7 @@ public class MemoryCodec extends Codec {
private byte[] buffer = new byte[16];
private byte[] buffer = new byte[16];
private final ByteArrayDataInput in = new ByteArrayDataInput(buffer);
private final ByteArrayDataInput in = new ByteArrayDataInput(buffer);

private Bits skipDocs;
private Bits liveDocs;
private int docUpto;
private int docUpto;
private int docID;
private int docID;
private int freq;
private int freq;

@ -285,14 +285,14 @@ public class MemoryCodec extends Codec {
return omitTFAP == this.omitTFAP && storePayloads == this.storePayloads;
return omitTFAP == this.omitTFAP && storePayloads == this.storePayloads;
}
}

public FSTDocsEnum reset(BytesRef bufferIn, Bits skipDocs, int numDocs) {
public FSTDocsEnum reset(BytesRef bufferIn, Bits liveDocs, int numDocs) {
assert numDocs > 0;
assert numDocs > 0;
if (buffer.length < bufferIn.length - bufferIn.offset) {
if (buffer.length < bufferIn.length - bufferIn.offset) {
buffer = ArrayUtil.grow(buffer, bufferIn.length - bufferIn.offset);
buffer = ArrayUtil.grow(buffer, bufferIn.length - bufferIn.offset);
}
}
in.reset(buffer, 0, bufferIn.length - bufferIn.offset);
in.reset(buffer, 0, bufferIn.length - bufferIn.offset);
System.arraycopy(bufferIn.bytes, bufferIn.offset, buffer, 0, bufferIn.length - bufferIn.offset);
System.arraycopy(bufferIn.bytes, bufferIn.offset, buffer, 0, bufferIn.length - bufferIn.offset);
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
docID = 0;
docID = 0;
docUpto = 0;
docUpto = 0;
payloadLen = 0;
payloadLen = 0;

@ -339,7 +339,7 @@ public class MemoryCodec extends Codec {
}
}
}
}

if (skipDocs == null || !skipDocs.get(docID)) {
if (liveDocs == null || liveDocs.get(docID)) {
if (VERBOSE) System.out.println(" return docID=" + docID + " freq=" + freq);
if (VERBOSE) System.out.println(" return docID=" + docID + " freq=" + freq);
return docID;
return docID;
}
}

@ -375,7 +375,7 @@ public class MemoryCodec extends Codec {
private byte[] buffer = new byte[16];
private byte[] buffer = new byte[16];
private final ByteArrayDataInput in = new ByteArrayDataInput(buffer);
private final ByteArrayDataInput in = new ByteArrayDataInput(buffer);

private Bits skipDocs;
private Bits liveDocs;
private int docUpto;
private int docUpto;
private int docID;
private int docID;
private int freq;
private int freq;

@ -396,7 +396,7 @@ public class MemoryCodec extends Codec {
return omitTFAP == this.omitTFAP && storePayloads == this.storePayloads;
return omitTFAP == this.omitTFAP && storePayloads == this.storePayloads;
}
}

public FSTDocsAndPositionsEnum reset(BytesRef bufferIn, Bits skipDocs, int numDocs) {
public FSTDocsAndPositionsEnum reset(BytesRef bufferIn, Bits liveDocs, int numDocs) {
assert numDocs > 0;
assert numDocs > 0;
if (VERBOSE) {
if (VERBOSE) {
System.out.println("D&P reset bytes this=" + this);
System.out.println("D&P reset bytes this=" + this);

@ -409,7 +409,7 @@ public class MemoryCodec extends Codec {
}
}
in.reset(buffer, 0, bufferIn.length - bufferIn.offset);
in.reset(buffer, 0, bufferIn.length - bufferIn.offset);
System.arraycopy(bufferIn.bytes, bufferIn.offset, buffer, 0, bufferIn.length - bufferIn.offset);
System.arraycopy(bufferIn.bytes, bufferIn.offset, buffer, 0, bufferIn.length - bufferIn.offset);
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
docID = 0;
docID = 0;
docUpto = 0;
docUpto = 0;
payload.bytes = buffer;
payload.bytes = buffer;

@ -446,7 +446,7 @@ public class MemoryCodec extends Codec {
}
}
}
}

if (skipDocs == null || !skipDocs.get(docID)) {
if (liveDocs == null || liveDocs.get(docID)) {
pos = 0;
pos = 0;
posPending = freq;
posPending = freq;
if (VERBOSE) System.out.println(" return docID=" + docID + " freq=" + freq);
if (VERBOSE) System.out.println(" return docID=" + docID + " freq=" + freq);

@ -598,7 +598,7 @@ public class MemoryCodec extends Codec {
}
}

@Override
@Override
public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
decodeMetaData();
decodeMetaData();
FSTDocsEnum docsEnum;
FSTDocsEnum docsEnum;
if (reuse == null || !(reuse instanceof FSTDocsEnum)) {
if (reuse == null || !(reuse instanceof FSTDocsEnum)) {

@ -609,11 +609,11 @@ public class MemoryCodec extends Codec {
docsEnum = new FSTDocsEnum(field.omitTermFreqAndPositions, field.storePayloads);
docsEnum = new FSTDocsEnum(field.omitTermFreqAndPositions, field.storePayloads);
}
}
}
}
return docsEnum.reset(current.output, skipDocs, docFreq);
return docsEnum.reset(current.output, liveDocs, docFreq);
}
}

@Override
@Override
public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
if (field.omitTermFreqAndPositions) {
if (field.omitTermFreqAndPositions) {
return null;
return null;
}
}

@ -628,7 +628,7 @@ public class MemoryCodec extends Codec {
}
}
}
}
if (VERBOSE) System.out.println("D&P reset this=" + this);
if (VERBOSE) System.out.println("D&P reset this=" + this);
return docsAndPositionsEnum.reset(current.output, skipDocs, docFreq);
return docsAndPositionsEnum.reset(current.output, liveDocs, docFreq);
}
}

@Override
@Override

@ -951,7 +951,7 @@ public class PreFlexFields extends FieldsProducer {
}
}

@Override
@Override
public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
PreDocsEnum docsEnum;
PreDocsEnum docsEnum;
if (reuse == null || !(reuse instanceof PreDocsEnum)) {
if (reuse == null || !(reuse instanceof PreDocsEnum)) {
docsEnum = new PreDocsEnum();
docsEnum = new PreDocsEnum();

@ -961,11 +961,11 @@ public class PreFlexFields extends FieldsProducer {
docsEnum = new PreDocsEnum();
docsEnum = new PreDocsEnum();
}
}
}
}
return docsEnum.reset(termEnum, skipDocs);
return docsEnum.reset(termEnum, liveDocs);
}
}

@Override
@Override
public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
PreDocsAndPositionsEnum docsPosEnum;
PreDocsAndPositionsEnum docsPosEnum;
if (fieldInfo.omitTermFreqAndPositions) {
if (fieldInfo.omitTermFreqAndPositions) {
return null;
return null;

@ -977,7 +977,7 @@ public class PreFlexFields extends FieldsProducer {
docsPosEnum = new PreDocsAndPositionsEnum();
docsPosEnum = new PreDocsAndPositionsEnum();
}
}
}
}
return docsPosEnum.reset(termEnum, skipDocs);
return docsPosEnum.reset(termEnum, liveDocs);
}
}
}
}

@ -992,8 +992,8 @@ public class PreFlexFields extends FieldsProducer {
return freqStream;
return freqStream;
}
}

public PreDocsEnum reset(SegmentTermEnum termEnum, Bits skipDocs) throws IOException {
public PreDocsEnum reset(SegmentTermEnum termEnum, Bits liveDocs) throws IOException {
docs.setSkipDocs(skipDocs);
docs.setLiveDocs(liveDocs);
docs.seek(termEnum);
docs.seek(termEnum);
return this;
return this;
}
}

@ -1048,8 +1048,8 @@ public class PreFlexFields extends FieldsProducer {
return freqStream;
return freqStream;
}
}

public DocsAndPositionsEnum reset(SegmentTermEnum termEnum, Bits skipDocs) throws IOException {
public DocsAndPositionsEnum reset(SegmentTermEnum termEnum, Bits liveDocs) throws IOException {
pos.setSkipDocs(skipDocs);
pos.setLiveDocs(liveDocs);
pos.seek(termEnum);
pos.seek(termEnum);
return this;
return this;
}
}

@ -33,7 +33,7 @@ public class SegmentTermDocs {
//protected SegmentReader parent;
//protected SegmentReader parent;
private final FieldInfos fieldInfos;
private final FieldInfos fieldInfos;
private final TermInfosReader tis;
private final TermInfosReader tis;
protected Bits skipDocs;
protected Bits liveDocs;
protected IndexInput freqStream;
protected IndexInput freqStream;
protected int count;
protected int count;
protected int df;
protected int df;

@ -53,18 +53,6 @@ public class SegmentTermDocs {
protected boolean currentFieldStoresPayloads;
protected boolean currentFieldStoresPayloads;
protected boolean currentFieldOmitTermFreqAndPositions;
protected boolean currentFieldOmitTermFreqAndPositions;

/*
protected SegmentTermDocs(SegmentReader parent) {
this.parent = parent;
this.freqStream = (IndexInput) parent.core.freqStream.clone();
synchronized (parent) {
this.deletedDocs = parent.deletedDocs;
}
this.skipInterval = parent.core.getTermsReader().getSkipInterval();
this.maxSkipLevels = parent.core.getTermsReader().getMaxSkipLevels();
}
*/

public SegmentTermDocs(IndexInput freqStream, TermInfosReader tis, FieldInfos fieldInfos) {
public SegmentTermDocs(IndexInput freqStream, TermInfosReader tis, FieldInfos fieldInfos) {
this.freqStream = (IndexInput) freqStream.clone();
this.freqStream = (IndexInput) freqStream.clone();
this.tis = tis;
this.tis = tis;

@ -78,8 +66,8 @@ public class SegmentTermDocs {
seek(ti, term);
seek(ti, term);
}
}

public void setSkipDocs(Bits skipDocs) {
public void setLiveDocs(Bits liveDocs) {
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
}
}

public void seek(SegmentTermEnum segmentTermEnum) throws IOException {
public void seek(SegmentTermEnum segmentTermEnum) throws IOException {

@ -149,7 +137,7 @@ public class SegmentTermDocs {

count++;
count++;

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
break;
}
}
skippingDoc();
skippingDoc();

@ -175,7 +163,7 @@ public class SegmentTermDocs {
freq = freqStream.readVInt(); // else read freq
freq = freqStream.readVInt(); // else read freq
count++;
count++;

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
docs[i] = doc;
docs[i] = doc;
freqs[i] = freq;
freqs[i] = freq;
++i;
++i;

@ -192,7 +180,7 @@ public class SegmentTermDocs {
doc += freqStream.readVInt();
doc += freqStream.readVInt();
count++;
count++;

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
docs[i] = doc;
docs[i] = doc;
// Hardwire freq to 1 when term freqs were not
// Hardwire freq to 1 when term freqs were not
// stored in the index
// stored in the index

@ -167,7 +167,7 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
// TODO: we could actually reuse, by having TL that
// TODO: we could actually reuse, by having TL that
// holds the last wrapped reuse, and vice-versa
// holds the last wrapped reuse, and vice-versa
@Override
@Override
public DocsEnum docs(FieldInfo field, BlockTermState _termState, Bits skipDocs, DocsEnum reuse) throws IOException {
public DocsEnum docs(FieldInfo field, BlockTermState _termState, Bits liveDocs, DocsEnum reuse) throws IOException {
PulsingTermState termState = (PulsingTermState) _termState;
PulsingTermState termState = (PulsingTermState) _termState;
if (termState.postingsSize != -1) {
if (termState.postingsSize != -1) {
PulsingDocsEnum postings;
PulsingDocsEnum postings;

@ -179,20 +179,20 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
} else {
} else {
postings = new PulsingDocsEnum(field);
postings = new PulsingDocsEnum(field);
}
}
return postings.reset(skipDocs, termState);
return postings.reset(liveDocs, termState);
} else {
} else {
// TODO: not great that we lose reuse of PulsingDocsEnum in this case:
// TODO: not great that we lose reuse of PulsingDocsEnum in this case:
if (reuse instanceof PulsingDocsEnum) {
if (reuse instanceof PulsingDocsEnum) {
return wrappedPostingsReader.docs(field, termState.wrappedTermState, skipDocs, null);
return wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, null);
} else {
} else {
return wrappedPostingsReader.docs(field, termState.wrappedTermState, skipDocs, reuse);
return wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, reuse);
}
}
}
}
}
}

// TODO: -- not great that we can't always reuse
// TODO: -- not great that we can't always reuse
@Override
@Override
public DocsAndPositionsEnum docsAndPositions(FieldInfo field, BlockTermState _termState, Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
public DocsAndPositionsEnum docsAndPositions(FieldInfo field, BlockTermState _termState, Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
if (field.omitTermFreqAndPositions) {
if (field.omitTermFreqAndPositions) {
return null;
return null;
}
}

@ -211,12 +211,12 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
postings = new PulsingDocsAndPositionsEnum(field);
postings = new PulsingDocsAndPositionsEnum(field);
}
}

return postings.reset(skipDocs, termState);
return postings.reset(liveDocs, termState);
} else {
} else {
if (reuse instanceof PulsingDocsAndPositionsEnum) {
if (reuse instanceof PulsingDocsAndPositionsEnum) {
return wrappedPostingsReader.docsAndPositions(field, termState.wrappedTermState, skipDocs, null);
return wrappedPostingsReader.docsAndPositions(field, termState.wrappedTermState, liveDocs, null);
} else {
} else {
return wrappedPostingsReader.docsAndPositions(field, termState.wrappedTermState, skipDocs, reuse);
return wrappedPostingsReader.docsAndPositions(field, termState.wrappedTermState, liveDocs, reuse);
}
}
}
}
}
}

@ -225,7 +225,7 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
private final ByteArrayDataInput postings = new ByteArrayDataInput();
private final ByteArrayDataInput postings = new ByteArrayDataInput();
private final boolean omitTF;
private final boolean omitTF;
private final boolean storePayloads;
private final boolean storePayloads;
private Bits skipDocs;
private Bits liveDocs;
private int docID;
private int docID;
private int freq;
private int freq;
private int payloadLength;
private int payloadLength;

@ -235,7 +235,7 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
storePayloads = fieldInfo.storePayloads;
storePayloads = fieldInfo.storePayloads;
}
}

public PulsingDocsEnum reset(Bits skipDocs, PulsingTermState termState) {
public PulsingDocsEnum reset(Bits liveDocs, PulsingTermState termState) {
//System.out.println("PR docsEnum termState=" + termState + " docFreq=" + termState.docFreq);
//System.out.println("PR docsEnum termState=" + termState + " docFreq=" + termState.docFreq);
assert termState.postingsSize != -1;
assert termState.postingsSize != -1;
final byte[] bytes = new byte[termState.postingsSize];
final byte[] bytes = new byte[termState.postingsSize];

@ -244,7 +244,7 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
docID = 0;
docID = 0;
payloadLength = 0;
payloadLength = 0;
freq = 1;
freq = 1;
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
return this;
return this;
}
}

@ -291,7 +291,7 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
}
}
}
}

if (skipDocs == null || !skipDocs.get(docID)) {
if (liveDocs == null || liveDocs.get(docID)) {
//System.out.println(" return docID=" + docID + " freq=" + freq);
//System.out.println(" return docID=" + docID + " freq=" + freq);
return docID;
return docID;
}
}

@ -323,7 +323,7 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
private final ByteArrayDataInput postings = new ByteArrayDataInput();
private final ByteArrayDataInput postings = new ByteArrayDataInput();
private final boolean storePayloads;
private final boolean storePayloads;

private Bits skipDocs;
private Bits liveDocs;
private int docID;
private int docID;
private int freq;
private int freq;
private int posPending;
private int posPending;

@ -341,12 +341,12 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
return storePayloads == fieldInfo.storePayloads;
return storePayloads == fieldInfo.storePayloads;
}
}

public PulsingDocsAndPositionsEnum reset(Bits skipDocs, PulsingTermState termState) {
public PulsingDocsAndPositionsEnum reset(Bits liveDocs, PulsingTermState termState) {
assert termState.postingsSize != -1;
assert termState.postingsSize != -1;
final byte[] bytes = new byte[termState.postingsSize];
final byte[] bytes = new byte[termState.postingsSize];
System.arraycopy(termState.postings, 0, bytes, 0, termState.postingsSize);
System.arraycopy(termState.postings, 0, bytes, 0, termState.postingsSize);
postings.reset(bytes);
postings.reset(bytes);
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
payloadLength = 0;
payloadLength = 0;
posPending = 0;
posPending = 0;
docID = 0;
docID = 0;

@ -378,7 +378,7 @@ public class PulsingPostingsReaderImpl extends PostingsReaderBase {
}
}
posPending = freq;
posPending = freq;

if (skipDocs == null || !skipDocs.get(docID)) {
if (liveDocs == null || liveDocs.get(docID)) {
//System.out.println(" return docID=" + docID + " freq=" + freq);
//System.out.println(" return docID=" + docID + " freq=" + freq);
position = 0;
position = 0;
return docID;
return docID;

@ -256,7 +256,7 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
}
}

@Override
@Override
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState _termState, Bits skipDocs, DocsEnum reuse) throws IOException {
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState _termState, Bits liveDocs, DocsEnum reuse) throws IOException {
final SepTermState termState = (SepTermState) _termState;
final SepTermState termState = (SepTermState) _termState;
SepDocsEnum docsEnum;
SepDocsEnum docsEnum;
if (reuse == null || !(reuse instanceof SepDocsEnum)) {
if (reuse == null || !(reuse instanceof SepDocsEnum)) {

@ -271,11 +271,11 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
}
}
}
}

return docsEnum.init(fieldInfo, termState, skipDocs);
return docsEnum.init(fieldInfo, termState, liveDocs);
}
}

@Override
@Override
public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState _termState, Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState _termState, Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
assert !fieldInfo.omitTermFreqAndPositions;
assert !fieldInfo.omitTermFreqAndPositions;
final SepTermState termState = (SepTermState) _termState;
final SepTermState termState = (SepTermState) _termState;
SepDocsAndPositionsEnum postingsEnum;
SepDocsAndPositionsEnum postingsEnum;

@ -291,7 +291,7 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
}
}
}
}

return postingsEnum.init(fieldInfo, termState, skipDocs);
return postingsEnum.init(fieldInfo, termState, liveDocs);
}
}

class SepDocsEnum extends DocsEnum {
class SepDocsEnum extends DocsEnum {

@ -304,7 +304,7 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
// TODO: -- should we do omitTF with 2 different enum classes?
// TODO: -- should we do omitTF with 2 different enum classes?
private boolean omitTF;
private boolean omitTF;
private boolean storePayloads;
private boolean storePayloads;
private Bits skipDocs;
private Bits liveDocs;
private final IntIndexInput.Reader docReader;
private final IntIndexInput.Reader docReader;
private final IntIndexInput.Reader freqReader;
private final IntIndexInput.Reader freqReader;
private long skipFP;
private long skipFP;

@ -337,8 +337,8 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
}
}
}
}

SepDocsEnum init(FieldInfo fieldInfo, SepTermState termState, Bits skipDocs) throws IOException {
SepDocsEnum init(FieldInfo fieldInfo, SepTermState termState, Bits liveDocs) throws IOException {
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
omitTF = fieldInfo.omitTermFreqAndPositions;
omitTF = fieldInfo.omitTermFreqAndPositions;
storePayloads = fieldInfo.storePayloads;
storePayloads = fieldInfo.storePayloads;

@ -383,7 +383,7 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
freq = freqReader.next();
freq = freqReader.next();
}
}

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
break;
}
}
}
}

@ -408,7 +408,7 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
freq = freqReader.next();
freq = freqReader.next();
}
}

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
docs[i] = doc;
docs[i] = doc;
freqs[i] = freq;
freqs[i] = freq;
//System.out.println(" docs[" + i + "]=" + doc + " count=" + count + " dF=" + docFreq);
//System.out.println(" docs[" + i + "]=" + doc + " count=" + count + " dF=" + docFreq);

@ -493,7 +493,7 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
long freqStart;
long freqStart;

private boolean storePayloads;
private boolean storePayloads;
private Bits skipDocs;
private Bits liveDocs;
private final IntIndexInput.Reader docReader;
private final IntIndexInput.Reader docReader;
private final IntIndexInput.Reader freqReader;
private final IntIndexInput.Reader freqReader;
private final IntIndexInput.Reader posReader;
private final IntIndexInput.Reader posReader;

@ -528,8 +528,8 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {
payloadIn = (IndexInput) SepPostingsReaderImpl.this.payloadIn.clone();
payloadIn = (IndexInput) SepPostingsReaderImpl.this.payloadIn.clone();
}
}

SepDocsAndPositionsEnum init(FieldInfo fieldInfo, SepTermState termState, Bits skipDocs) throws IOException {
SepDocsAndPositionsEnum init(FieldInfo fieldInfo, SepTermState termState, Bits liveDocs) throws IOException {
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
storePayloads = fieldInfo.storePayloads;
storePayloads = fieldInfo.storePayloads;
//System.out.println("Sep D&P init");
//System.out.println("Sep D&P init");

@ -584,7 +584,7 @@ public class SepPostingsReaderImpl extends PostingsReaderBase {

pendingPosCount += freq;
pendingPosCount += freq;

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
break;
}
}
}
}

@ -211,18 +211,18 @@ class SimpleTextFieldsReader extends FieldsProducer {
}
}

@Override
@Override
public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
SimpleTextDocsEnum docsEnum;
SimpleTextDocsEnum docsEnum;
if (reuse != null && reuse instanceof SimpleTextDocsEnum && ((SimpleTextDocsEnum) reuse).canReuse(in)) {
if (reuse != null && reuse instanceof SimpleTextDocsEnum && ((SimpleTextDocsEnum) reuse).canReuse(in)) {
docsEnum = (SimpleTextDocsEnum) reuse;
docsEnum = (SimpleTextDocsEnum) reuse;
} else {
} else {
docsEnum = new SimpleTextDocsEnum();
docsEnum = new SimpleTextDocsEnum();
}
}
return docsEnum.reset(docsStart, skipDocs, omitTF);
return docsEnum.reset(docsStart, liveDocs, omitTF);
}
}

@Override
@Override
public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
if (omitTF) {
if (omitTF) {
return null;
return null;
}
}

@ -233,7 +233,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
} else {
} else {
docsAndPositionsEnum = new SimpleTextDocsAndPositionsEnum();
docsAndPositionsEnum = new SimpleTextDocsAndPositionsEnum();
}
}
return docsAndPositionsEnum.reset(docsStart, skipDocs);
return docsAndPositionsEnum.reset(docsStart, liveDocs);
}
}

@Override
@Override

@ -248,7 +248,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
private boolean omitTF;
private boolean omitTF;
private int docID;
private int docID;
private int tf;
private int tf;
private Bits skipDocs;
private Bits liveDocs;
private final BytesRef scratch = new BytesRef(10);
private final BytesRef scratch = new BytesRef(10);
private final CharsRef scratchUTF16 = new CharsRef(10);
private final CharsRef scratchUTF16 = new CharsRef(10);

@ -261,8 +261,8 @@ class SimpleTextFieldsReader extends FieldsProducer {
return in == inStart;
return in == inStart;
}
}

public SimpleTextDocsEnum reset(long fp, Bits skipDocs, boolean omitTF) throws IOException {
public SimpleTextDocsEnum reset(long fp, Bits liveDocs, boolean omitTF) throws IOException {
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
in.seek(fp);
in.seek(fp);
this.omitTF = omitTF;
this.omitTF = omitTF;
if (omitTF) {
if (omitTF) {

@ -292,7 +292,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
final long lineStart = in.getFilePointer();
final long lineStart = in.getFilePointer();
readLine(in, scratch);
readLine(in, scratch);
if (scratch.startsWith(DOC)) {
if (scratch.startsWith(DOC)) {
if (!first && (skipDocs == null || !skipDocs.get(docID))) {
if (!first && (liveDocs == null || liveDocs.get(docID))) {
in.seek(lineStart);
in.seek(lineStart);
if (!omitTF) {
if (!omitTF) {
tf = termFreq;
tf = termFreq;

@ -309,7 +309,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
// skip
// skip
} else {
} else {
assert scratch.startsWith(TERM) || scratch.startsWith(FIELD) || scratch.startsWith(END): "scratch=" + scratch.utf8ToString();
assert scratch.startsWith(TERM) || scratch.startsWith(FIELD) || scratch.startsWith(END): "scratch=" + scratch.utf8ToString();
if (!first && (skipDocs == null || !skipDocs.get(docID))) {
if (!first && (liveDocs == null || liveDocs.get(docID))) {
in.seek(lineStart);
in.seek(lineStart);
if (!omitTF) {
if (!omitTF) {
tf = termFreq;
tf = termFreq;

@ -334,7 +334,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
private final IndexInput in;
private final IndexInput in;
private int docID;
private int docID;
private int tf;
private int tf;
private Bits skipDocs;
private Bits liveDocs;
private final BytesRef scratch = new BytesRef(10);
private final BytesRef scratch = new BytesRef(10);
private final BytesRef scratch2 = new BytesRef(10);
private final BytesRef scratch2 = new BytesRef(10);
private final CharsRef scratchUTF16 = new CharsRef(10);
private final CharsRef scratchUTF16 = new CharsRef(10);

@ -351,8 +351,8 @@ class SimpleTextFieldsReader extends FieldsProducer {
return in == inStart;
return in == inStart;
}
}

public SimpleTextDocsAndPositionsEnum reset(long fp, Bits skipDocs) {
public SimpleTextDocsAndPositionsEnum reset(long fp, Bits liveDocs) {
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
nextDocStart = fp;
nextDocStart = fp;
return this;
return this;
}
}

@ -376,7 +376,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
final long lineStart = in.getFilePointer();
final long lineStart = in.getFilePointer();
readLine(in, scratch);
readLine(in, scratch);
if (scratch.startsWith(DOC)) {
if (scratch.startsWith(DOC)) {
if (!first && (skipDocs == null || !skipDocs.get(docID))) {
if (!first && (liveDocs == null || liveDocs.get(docID))) {
nextDocStart = lineStart;
nextDocStart = lineStart;
in.seek(posStart);
in.seek(posStart);
return docID;
return docID;

@ -392,7 +392,7 @@ class SimpleTextFieldsReader extends FieldsProducer {
// skip
// skip
} else {
} else {
assert scratch.startsWith(TERM) || scratch.startsWith(FIELD) || scratch.startsWith(END);
assert scratch.startsWith(TERM) || scratch.startsWith(FIELD) || scratch.startsWith(END);
if (!first && (skipDocs == null || !skipDocs.get(docID))) {
if (!first && (liveDocs == null || liveDocs.get(docID))) {
nextDocStart = lineStart;
nextDocStart = lineStart;
in.seek(posStart);
in.seek(posStart);
return docID;
return docID;

@ -200,7 +200,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
}
}

@Override
@Override
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits skipDocs, DocsEnum reuse) throws IOException {
public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsEnum reuse) throws IOException {
SegmentDocsEnum docsEnum;
SegmentDocsEnum docsEnum;
if (reuse == null || !(reuse instanceof SegmentDocsEnum)) {
if (reuse == null || !(reuse instanceof SegmentDocsEnum)) {
docsEnum = new SegmentDocsEnum(freqIn);
docsEnum = new SegmentDocsEnum(freqIn);

@ -213,11 +213,11 @@ public class StandardPostingsReader extends PostingsReaderBase {
docsEnum = new SegmentDocsEnum(freqIn);
docsEnum = new SegmentDocsEnum(freqIn);
}
}
}
}
return docsEnum.reset(fieldInfo, (StandardTermState) termState, skipDocs);
return docsEnum.reset(fieldInfo, (StandardTermState) termState, liveDocs);
}
}

@Override
@Override
public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState termState, Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
if (fieldInfo.omitTermFreqAndPositions) {
if (fieldInfo.omitTermFreqAndPositions) {
return null;
return null;
}
}

@ -236,7 +236,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
docsEnum = new SegmentDocsAndPositionsAndPayloadsEnum(freqIn, proxIn);
docsEnum = new SegmentDocsAndPositionsAndPayloadsEnum(freqIn, proxIn);
}
}
}
}
return docsEnum.reset(fieldInfo, (StandardTermState) termState, skipDocs);
return docsEnum.reset(fieldInfo, (StandardTermState) termState, liveDocs);
} else {
} else {
SegmentDocsAndPositionsEnum docsEnum;
SegmentDocsAndPositionsEnum docsEnum;
if (reuse == null || !(reuse instanceof SegmentDocsAndPositionsEnum)) {
if (reuse == null || !(reuse instanceof SegmentDocsAndPositionsEnum)) {

@ -250,7 +250,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
docsEnum = new SegmentDocsAndPositionsEnum(freqIn, proxIn);
docsEnum = new SegmentDocsAndPositionsEnum(freqIn, proxIn);
}
}
}
}
return docsEnum.reset(fieldInfo, (StandardTermState) termState, skipDocs);
return docsEnum.reset(fieldInfo, (StandardTermState) termState, liveDocs);
}
}
}
}

@ -267,7 +267,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
int doc; // doc we last read
int doc; // doc we last read
int freq; // freq we last read
int freq; // freq we last read

Bits skipDocs;
Bits liveDocs;

long freqOffset;
long freqOffset;
int skipOffset;
int skipOffset;

@ -280,13 +280,13 @@ public class StandardPostingsReader extends PostingsReaderBase {
this.freqIn = (IndexInput) freqIn.clone();
this.freqIn = (IndexInput) freqIn.clone();
}
}

public SegmentDocsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits skipDocs) throws IOException {
public SegmentDocsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits liveDocs) throws IOException {
omitTF = fieldInfo.omitTermFreqAndPositions;
omitTF = fieldInfo.omitTermFreqAndPositions;
if (omitTF) {
if (omitTF) {
freq = 1;
freq = 1;
}
}
storePayloads = fieldInfo.storePayloads;
storePayloads = fieldInfo.storePayloads;
this.skipDocs = skipDocs;
this.liveDocs = liveDocs;
freqOffset = termState.freqOffset;
freqOffset = termState.freqOffset;
skipOffset = termState.skipOffset;
skipOffset = termState.skipOffset;

@ -327,7 +327,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
}
}
}
}

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
break;
}
}
}
}

@ -357,7 +357,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
}
}
}
}

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
docs[i] = doc;
docs[i] = doc;
freqs[i] = freq;
freqs[i] = freq;
++i;
++i;

@ -435,7 +435,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
int freq; // freq we last read
int freq; // freq we last read
int position;
int position;

Bits skipDocs;
Bits liveDocs;

long freqOffset;
long freqOffset;
int skipOffset;
int skipOffset;

@ -453,11 +453,11 @@ public class StandardPostingsReader extends PostingsReaderBase {
this.proxIn = (IndexInput) proxIn.clone();
this.proxIn = (IndexInput) proxIn.clone();
}
}

public SegmentDocsAndPositionsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits skipDocs) throws IOException {
public SegmentDocsAndPositionsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits liveDocs) throws IOException {
assert !fieldInfo.omitTermFreqAndPositions;
assert !fieldInfo.omitTermFreqAndPositions;
assert !fieldInfo.storePayloads;
assert !fieldInfo.storePayloads;

this.skipDocs = skipDocs;
this.liveDocs = liveDocs;

// TODO: for full enum case (eg segment merging) this
// TODO: for full enum case (eg segment merging) this
// seek is unnecessary; maybe we can avoid in such
// seek is unnecessary; maybe we can avoid in such

@ -504,7 +504,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
}
}
posPendingCount += freq;
posPendingCount += freq;

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
break;
}
}
}
}

@ -626,7 +626,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
int freq; // freq we last read
int freq; // freq we last read
int position;
int position;

Bits skipDocs;
Bits liveDocs;

long freqOffset;
long freqOffset;
int skipOffset;
int skipOffset;

@ -647,7 +647,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
this.proxIn = (IndexInput) proxIn.clone();
this.proxIn = (IndexInput) proxIn.clone();
}
}

public SegmentDocsAndPositionsAndPayloadsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits skipDocs) throws IOException {
public SegmentDocsAndPositionsAndPayloadsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits liveDocs) throws IOException {
assert !fieldInfo.omitTermFreqAndPositions;
assert !fieldInfo.omitTermFreqAndPositions;
assert fieldInfo.storePayloads;
assert fieldInfo.storePayloads;
if (payload == null) {
if (payload == null) {

@ -655,7 +655,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
payload.bytes = new byte[1];
payload.bytes = new byte[1];
}
}

this.skipDocs = skipDocs;
this.liveDocs = liveDocs;

// TODO: for full enum case (eg segment merging) this
// TODO: for full enum case (eg segment merging) this
// seek is unnecessary; maybe we can avoid in such
// seek is unnecessary; maybe we can avoid in such

@ -701,7 +701,7 @@ public class StandardPostingsReader extends PostingsReaderBase {
}
}
posPendingCount += freq;
posPendingCount += freq;

if (skipDocs == null || !skipDocs.get(doc)) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
break;
}
}
}
}

@ -103,7 +103,7 @@ class FixedStraightBytesImpl {
datOut = getDataOut();
datOut = getDataOut();
boolean success = false;
boolean success = false;
try {
try {
if (state.bits == null && state.reader instanceof Reader) {
if (state.liveDocs == null && state.reader instanceof Reader) {
Reader reader = (Reader) state.reader;
Reader reader = (Reader) state.reader;
final int maxDocs = reader.maxDoc;
final int maxDocs = reader.maxDoc;
if (maxDocs == 0) {
if (maxDocs == 0) {

@ -131,7 +131,7 @@ public class Floats {
if (datOut == null) {
if (datOut == null) {
initDataOut();
initDataOut();
}
}
if (state.bits == null && state.reader instanceof FloatsReader) {
if (state.liveDocs == null && state.reader instanceof FloatsReader) {
// no deletes - bulk copy
// no deletes - bulk copy
final FloatsReader reader = (FloatsReader) state.reader;
final FloatsReader reader = (FloatsReader) state.reader;
assert reader.precisionBytes == (int) precision;
assert reader.precisionBytes == (int) precision;

@ -182,7 +182,7 @@ class IntsImpl {
merging = true;
merging = true;
if (typeOrd != PACKED) {
if (typeOrd != PACKED) {
initDataOut(typeOrd); // init datOut since we merge directly
initDataOut(typeOrd); // init datOut since we merge directly
if (state.bits == null && state.reader instanceof IntsReader) {
if (state.liveDocs == null && state.reader instanceof IntsReader) {
// no deleted docs - try bulk copy
// no deleted docs - try bulk copy
final IntsReader reader = (IntsReader) state.reader;
final IntsReader reader = (IntsReader) state.reader;
if (reader.type == typeOrd) {
if (reader.type == typeOrd) {

@ -97,7 +97,7 @@ class VarStraightBytesImpl {
datOut = getDataOut();
datOut = getDataOut();
boolean success = false;
boolean success = false;
try {
try {
if (state.bits == null && state.reader instanceof Reader) {
if (state.liveDocs == null && state.reader instanceof Reader) {
// bulk merge since we don't have any deletes
// bulk merge since we don't have any deletes
Reader reader = (Reader) state.reader;
Reader reader = (Reader) state.reader;
final int maxDocs = reader.maxDoc;
final int maxDocs = reader.maxDoc;

@ -147,12 +147,12 @@ public abstract class Writer extends DocValuesConsumer {
// impl. will get the correct reference for the type
// impl. will get the correct reference for the type
// it supports
// it supports
int docID = state.docBase;
int docID = state.docBase;
final Bits bits = state.bits;
final Bits liveDocs = state.liveDocs;
final int docCount = state.docCount;
final int docCount = state.docCount;
int currentDocId;
int currentDocId;
if ((currentDocId = valEnum.advance(0)) != ValuesEnum.NO_MORE_DOCS) {
if ((currentDocId = valEnum.advance(0)) != ValuesEnum.NO_MORE_DOCS) {
for (int i = 0; i < docCount; i++) {
for (int i = 0; i < docCount; i++) {
if (bits == null || !bits.get(i)) {
if (liveDocs == null || liveDocs.get(i)) {
if (currentDocId < i) {
if (currentDocId < i) {
if ((currentDocId = valEnum.advance(i)) == ValuesEnum.NO_MORE_DOCS) {
if ((currentDocId = valEnum.advance(i)) == ValuesEnum.NO_MORE_DOCS) {
break; // advance can jump over default values
break; // advance can jump over default values

@ -54,7 +54,7 @@ public class CachingSpanFilter extends SpanFilter {
}
}
this.cache = new CachingWrapperFilter.FilterCache<SpanFilterResult>(deletesMode) {
this.cache = new CachingWrapperFilter.FilterCache<SpanFilterResult>(deletesMode) {
@Override
@Override
protected SpanFilterResult mergeDeletes(final Bits delDocs, final SpanFilterResult value) {
protected SpanFilterResult mergeLiveDocs(final Bits liveDocs, final SpanFilterResult value) {
throw new IllegalStateException("DeletesMode.DYNAMIC is not supported");
throw new IllegalStateException("DeletesMode.DYNAMIC is not supported");
}
}
};
};

@ -73,7 +73,7 @@ public class CachingSpanFilter extends SpanFilter {
final IndexReader reader = context.reader;
final IndexReader reader = context.reader;

final Object coreKey = reader.getCoreCacheKey();
final Object coreKey = reader.getCoreCacheKey();
final Object delCoreKey = reader.hasDeletions() ? reader.getDeletedDocs() : coreKey;
final Object delCoreKey = reader.hasDeletions() ? reader.getLiveDocs() : coreKey;

SpanFilterResult result = cache.get(reader, coreKey, delCoreKey);
SpanFilterResult result = cache.get(reader, coreKey, delCoreKey);
if (result != null) {
if (result != null) {

@ -103,13 +103,13 @@ public class CachingWrapperFilter extends Filter {
value = cache.get(delCoreKey);
value = cache.get(delCoreKey);

if (value == null) {
if (value == null) {
// now for core match, but dynamically AND NOT
// now for core match, but dynamically AND
// deletions
// live docs
value = cache.get(coreKey);
value = cache.get(coreKey);
if (value != null) {
if (value != null) {
final Bits delDocs = reader.getDeletedDocs();
final Bits liveDocs = reader.getLiveDocs();
if (delDocs != null) {
if (liveDocs != null) {
value = mergeDeletes(delDocs, value);
value = mergeLiveDocs(liveDocs, value);
}
}
}
}
}
}

@ -118,7 +118,7 @@ public class CachingWrapperFilter extends Filter {
return value;
return value;
}
}

protected abstract T mergeDeletes(Bits delDocs, T value);
protected abstract T mergeLiveDocs(Bits liveDocs, T value);

public synchronized void put(Object coreKey, Object delCoreKey, T value) {
public synchronized void put(Object coreKey, Object delCoreKey, T value) {
if (deletesMode == DeletesMode.IGNORE) {
if (deletesMode == DeletesMode.IGNORE) {

@ -158,11 +158,11 @@ public class CachingWrapperFilter extends Filter {
this.filter = filter;
this.filter = filter;
cache = new FilterCache<DocIdSet>(deletesMode) {
cache = new FilterCache<DocIdSet>(deletesMode) {
@Override
@Override
public DocIdSet mergeDeletes(final Bits delDocs, final DocIdSet docIdSet) {
public DocIdSet mergeLiveDocs(final Bits liveDocs, final DocIdSet docIdSet) {
return new FilteredDocIdSet(docIdSet) {
return new FilteredDocIdSet(docIdSet) {
@Override
@Override
protected boolean match(int docID) {
protected boolean match(int docID) {
return !delDocs.get(docID);
return liveDocs.get(docID);
}
}
};
};
}
}

@ -197,7 +197,7 @@ public class CachingWrapperFilter extends Filter {
public DocIdSet getDocIdSet(AtomicReaderContext context) throws IOException {
public DocIdSet getDocIdSet(AtomicReaderContext context) throws IOException {
final IndexReader reader = context.reader;
final IndexReader reader = context.reader;
final Object coreKey = reader.getCoreCacheKey();
final Object coreKey = reader.getCoreCacheKey();
final Object delCoreKey = reader.hasDeletions() ? reader.getDeletedDocs() : coreKey;
final Object delCoreKey = reader.hasDeletions() ? reader.getLiveDocs() : coreKey;

DocIdSet docIdSet = cache.get(reader, coreKey, delCoreKey);
DocIdSet docIdSet = cache.get(reader, coreKey, delCoreKey);
if (docIdSet != null) {
if (docIdSet != null) {

@ -136,16 +136,18 @@ public class ConstantScoreQuery extends Query {
if (filter != null) {
if (filter != null) {
assert query == null;
assert query == null;
final DocIdSet dis = filter.getDocIdSet(context);
final DocIdSet dis = filter.getDocIdSet(context);
if (dis == null)
if (dis == null) {
return null;
return null;
}
disi = dis.iterator();
disi = dis.iterator();
} else {
} else {
assert query != null && innerWeight != null;
assert query != null && innerWeight != null;
disi =
disi = innerWeight.scorer(context, scorerContext);
innerWeight.scorer(context, scorerContext);
}
}
if (disi == null)
if (disi == null) {
return null;
return null;
}
return new ConstantScorer(disi, this);
return new ConstantScorer(disi, this);
}
}

@ -20,7 +20,6 @@ import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRef;

@ -533,9 +532,9 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
@Override
@Override
public DocIdSetIterator iterator() throws IOException {
public DocIdSetIterator iterator() throws IOException {

final Bits skipDocs = canIgnoreDeletedDocs ? null : reader.getDeletedDocs();
final Bits liveDocs = canIgnoreDeletedDocs ? null : reader.getLiveDocs();

if (skipDocs == null) {
if (liveDocs == null) {
// Specialization optimization disregard deletions
// Specialization optimization disregard deletions
return new DocIdSetIterator() {
return new DocIdSetIterator() {
private int doc = -1;
private int doc = -1;

@ -575,7 +574,7 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
final int maxDoc = reader.maxDoc();
final int maxDoc = reader.maxDoc();

// a DocIdSetIterator generating docIds by
// a DocIdSetIterator generating docIds by
// incrementing a variable & checking skipDocs -
// incrementing a variable & checking liveDocs -
return new DocIdSetIterator() {
return new DocIdSetIterator() {
private int doc = -1;
private int doc = -1;
@Override
@Override

@ -590,14 +589,14 @@ public abstract class FieldCacheRangeFilter<T> extends Filter {
if (doc >= maxDoc) {
|
if (doc >= maxDoc) {
|
||||||
return doc = NO_MORE_DOCS;
|
return doc = NO_MORE_DOCS;
|
||||||
}
|
}
|
||||||
} while (skipDocs.get(doc) || !matchDoc(doc));
|
} while (!liveDocs.get(doc) || !matchDoc(doc));
|
||||||
return doc;
|
return doc;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public int advance(int target) {
|
public int advance(int target) {
|
||||||
for(doc=target;doc<maxDoc;doc++) {
|
for(doc=target;doc<maxDoc;doc++) {
|
||||||
if (!skipDocs.get(doc) && matchDoc(doc)) {
|
if (liveDocs.get(doc) && matchDoc(doc)) {
|
||||||
return doc;
|
return doc;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
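Note: for filters that scan every docID up to maxDoc, the inversion flips the guard from !skipDocs.get(doc) to liveDocs.get(doc), as in the hunks above. A sketch of such a linear-scan DocIdSetIterator, assuming the new convention (matchDoc stands in for the filter's range test; all names here are illustrative only):

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.Bits;

abstract class LinearScanIterator extends DocIdSetIterator {
  private final int maxDoc;
  private final Bits liveDocs; // may be null: no deletions in this segment
  private int doc = -1;

  LinearScanIterator(int maxDoc, Bits liveDocs) {
    this.maxDoc = maxDoc;
    this.liveDocs = liveDocs;
  }

  // The filter's actual per-document predicate (e.g. a FieldCache range test).
  protected abstract boolean matchDoc(int doc);

  @Override
  public int docID() { return doc; }

  @Override
  public int nextDoc() {
    do {
      doc++;
      if (doc >= maxDoc) {
        return doc = NO_MORE_DOCS;
      }
    } while ((liveDocs != null && !liveDocs.get(doc)) || !matchDoc(doc));
    return doc;
  }

  @Override
  public int advance(int target) {
    for (doc = target; doc < maxDoc; doc++) {
      if ((liveDocs == null || liveDocs.get(doc)) && matchDoc(doc)) {
        return doc;
      }
    }
    return doc = NO_MORE_DOCS;
  }
}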
@@ -44,7 +44,7 @@ public abstract class Filter {
* represent the whole underlying index i.e. if the index has more than
* one segment the given reader only represents a single segment.
* The provided context is always an atomic context, so you can call
-* {@link IndexReader#fields()} or {@link IndexReader#getDeletedDocs()}
+* {@link IndexReader#fields()} or {@link IndexReader#getLiveDocs()}
* on the context's reader, for example.
*
* @return a DocIdSet that provides the documents which should be permitted or
@@ -252,14 +252,14 @@ public final class FuzzyTermsEnum extends TermsEnum {
}

@Override
-public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
-return actualEnum.docs(skipDocs, reuse);
+public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
+return actualEnum.docs(liveDocs, reuse);
}

@Override
-public DocsAndPositionsEnum docsAndPositions(Bits skipDocs,
+public DocsAndPositionsEnum docsAndPositions(Bits liveDocs,
DocsAndPositionsEnum reuse) throws IOException {
-return actualEnum.docsAndPositions(skipDocs, reuse);
+return actualEnum.docsAndPositions(liveDocs, reuse);
}

@Override
@@ -50,14 +50,14 @@ public class MatchAllDocsQuery extends Query {
final byte[] norms;
private int doc = -1;
private final int maxDoc;
-private final Bits delDocs;
+private final Bits liveDocs;
private final Similarity similarity;

MatchAllScorer(IndexReader reader, Similarity similarity, Weight w,
byte[] norms) throws IOException {
super(w);
this.similarity = similarity;
-delDocs = reader.getDeletedDocs();
+liveDocs = reader.getLiveDocs();
score = w.getValue();
maxDoc = reader.maxDoc();
this.norms = norms;

@@ -71,7 +71,7 @@ public class MatchAllDocsQuery extends Query {
@Override
public int nextDoc() throws IOException {
doc++;
-while(delDocs != null && doc < maxDoc && delDocs.get(doc)) {
+while(liveDocs != null && doc < maxDoc && !liveDocs.get(doc)) {
doc++;
}
if (doc == maxDoc) {
@@ -175,7 +175,7 @@ public class MultiPhraseQuery extends Query {
if (termArrays.size() == 0) // optimize zero-term case
return null;
final IndexReader reader = context.reader;
-final Bits delDocs = reader.getDeletedDocs();
+final Bits liveDocs = reader.getLiveDocs();

PhraseQuery.PostingsAndFreq[] postingsFreqs = new PhraseQuery.PostingsAndFreq[termArrays.size()];

@@ -196,12 +196,12 @@ public class MultiPhraseQuery extends Query {
}
} else {
final Term term = terms[0];
-postingsEnum = reader.termPositionsEnum(delDocs,
+postingsEnum = reader.termPositionsEnum(liveDocs,
term.field(),
term.bytes());

if (postingsEnum == null) {
-if (reader.termDocsEnum(delDocs, term.field(), term.bytes()) != null) {
+if (reader.termDocsEnum(liveDocs, term.field(), term.bytes()) != null) {
// term does exist, but has no positions
throw new IllegalStateException("field \"" + term.field() + "\" was indexed with Field.omitTermFreqAndPositions=true; cannot run PhraseQuery (term=" + term.text() + ")");
} else {

@@ -497,15 +497,15 @@ class UnionDocsAndPositionsEnum extends DocsAndPositionsEnum {

public UnionDocsAndPositionsEnum(IndexReader indexReader, Term[] terms) throws IOException {
List<DocsAndPositionsEnum> docsEnums = new LinkedList<DocsAndPositionsEnum>();
-final Bits delDocs = indexReader.getDeletedDocs();
+final Bits liveDocs = indexReader.getLiveDocs();
for (int i = 0; i < terms.length; i++) {
-DocsAndPositionsEnum postings = indexReader.termPositionsEnum(delDocs,
+DocsAndPositionsEnum postings = indexReader.termPositionsEnum(liveDocs,
terms[i].field(),
terms[i].bytes());
if (postings != null) {
docsEnums.add(postings);
} else {
-if (indexReader.termDocsEnum(delDocs, terms[i].field(), terms[i].bytes()) != null) {
+if (indexReader.termDocsEnum(liveDocs, terms[i].field(), terms[i].bytes()) != null) {
// term does exist, but has no positions
throw new IllegalStateException("field \"" + terms[i].field() + "\" was indexed with Field.omitTermFreqAndPositions=true; cannot run PhraseQuery (term=" + terms[i].text() + ")");
}
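Note: callers obtain postings already restricted to live documents by handing the reader's live-docs bitset to termDocsEnum/termPositionsEnum, as the hunks above now do. A usage sketch under the new API (field and term values are made up):

import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;

class PostingsUsage {
  static void printLiveDocs(IndexReader reader) throws IOException {
    final Bits liveDocs = reader.getLiveDocs(); // null if nothing is deleted
    DocsEnum docs = reader.termDocsEnum(liveDocs, "body", new BytesRef("wow"));
    if (docs == null) {
      return; // term does not occur in this reader
    }
    int doc;
    while ((doc = docs.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
      System.out.println("live doc: " + doc); // deleted docs already skipped
    }
  }
}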
@@ -19,14 +19,14 @@ package org.apache.lucene.search;

import java.io.IOException;

+import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.OpenBitSet;

/**
* A wrapper for {@link MultiTermQuery}, that exposes its
@@ -123,15 +123,15 @@ public class MultiTermQueryWrapperFilter<Q extends MultiTermQuery> extends Filter {
assert termsEnum != null;
if (termsEnum.next() != null) {
// fill into a OpenBitSet
-final OpenBitSet bitSet = new OpenBitSet(context.reader.maxDoc());
+final OpenBitSet bitSet = new OpenBitSet(reader.maxDoc());
int termCount = 0;
-final Bits delDocs = reader.getDeletedDocs();
+final Bits liveDocs = reader.getLiveDocs();
DocsEnum docsEnum = null;
do {
termCount++;
// System.out.println(" iter termCount=" + termCount + " term=" +
// enumerator.term().toBytesString());
-docsEnum = termsEnum.docs(delDocs, docsEnum);
+docsEnum = termsEnum.docs(liveDocs, docsEnum);
final DocsEnum.BulkReadResult result = docsEnum.getBulkResult();
while (true) {
final int count = docsEnum.read();
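Note: MultiTermQueryWrapperFilter above fills an OpenBitSet from the postings of each matching term, passing liveDocs so deleted documents never enter the set. A simplified sketch without the bulk-read API (the termsEnum is assumed to be positioned on its first term by the caller; names are illustrative):

import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.OpenBitSet;

class FillBitSet {
  // Collects all live documents of the terms remaining on termsEnum.
  static OpenBitSet collect(TermsEnum termsEnum, Bits liveDocs, int maxDoc)
      throws IOException {
    final OpenBitSet bitSet = new OpenBitSet(maxDoc);
    DocsEnum docsEnum = null;
    do {
      // Reuse the enum across terms; liveDocs filters out deleted docs.
      docsEnum = termsEnum.docs(liveDocs, docsEnum);
      int doc;
      while ((doc = docsEnum.nextDoc()) != DocsEnum.NO_MORE_DOCS) {
        bitSet.set(doc);
      }
    } while (termsEnum.next() != null);
    return bitSet;
  }
}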
@@ -212,17 +212,17 @@ public class PhraseQuery extends Query {
if (terms.size() == 0) // optimize zero-term case
return null;
final IndexReader reader = context.reader;
+final Bits liveDocs = reader.getLiveDocs();
PostingsAndFreq[] postingsFreqs = new PostingsAndFreq[terms.size()];
-final Bits delDocs = reader.getDeletedDocs();
for (int i = 0; i < terms.size(); i++) {
final Term t = terms.get(i);
-DocsAndPositionsEnum postingsEnum = reader.termPositionsEnum(delDocs,
+DocsAndPositionsEnum postingsEnum = reader.termPositionsEnum(liveDocs,
t.field(),
t.bytes());
// PhraseQuery on a field that did not index
// positions.
if (postingsEnum == null) {
-if (reader.termDocsEnum(delDocs, t.field(), t.bytes()) != null) {
+if (reader.termDocsEnum(liveDocs, t.field(), t.bytes()) != null) {
// term does exist, but has no positions
throw new IllegalStateException("field \"" + t.field() + "\" was indexed with Field.omitTermFreqAndPositions=true; cannot run PhraseQuery (term=" + t.text() + ")");
} else {
@@ -90,13 +90,12 @@ public class TermQuery extends Query {
final String field = term.field();
final IndexReader reader = context.reader;
assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
-final TermState state = termStates
-.get(context.ord);
+final TermState state = termStates.get(context.ord);
if (state == null) { // term is not present in that reader
assert termNotInReader(reader, field, term.bytes()) : "no termstate found but term exists in reader";
return null;
}
-final DocsEnum docs = reader.termDocsEnum(reader.getDeletedDocs(), field, term.bytes(), state);
+final DocsEnum docs = reader.termDocsEnum(reader.getLiveDocs(), field, term.bytes(), state);
assert docs != null;
return new TermScorer(this, docs, similarity, context.reader.norms(field));
}

@@ -143,7 +142,7 @@ public class TermQuery extends Query {

Explanation tfExplanation = new Explanation();
int tf = 0;
-DocsEnum docs = reader.termDocsEnum(reader.getDeletedDocs(), term.field(), term.bytes());
+DocsEnum docs = reader.termDocsEnum(context.reader.getLiveDocs(), term.field(), term.bytes());
if (docs != null) {
int newDoc = docs.advance(doc);
if (newDoc == doc) {
@@ -105,7 +105,7 @@ public class DocTermsCreator extends EntryCreatorWithOptions<DocTerms>
if (terms != null) {
int termCount = 0;
final TermsEnum termsEnum = terms.iterator();
-final Bits delDocs = MultiFields.getDeletedDocs(reader);
+final Bits liveDocs = MultiFields.getLiveDocs(reader);
DocsEnum docs = null;
while(true) {
if (termCount++ == termCountHardLimit) {

@@ -120,7 +120,7 @@ public class DocTermsCreator extends EntryCreatorWithOptions<DocTerms>
break;
}
final long pointer = bytes.copyUsingLengthPrefix(term);
-docs = termsEnum.docs(delDocs, docs);
+docs = termsEnum.docs(liveDocs, docs);
while (true) {
final int docID = docs.nextDoc();
if (docID == DocIdSetIterator.NO_MORE_DOCS) {
@@ -322,12 +322,12 @@ public class DocTermsIndexCreator extends EntryCreatorWithOptions<DocTermsIndex>
}

@Override
-public DocsEnum docs(Bits skipDocs, DocsEnum reuse) throws IOException {
+public DocsEnum docs(Bits liveDocs, DocsEnum reuse) throws IOException {
throw new UnsupportedOperationException();
}

@Override
-public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
+public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
throw new UnsupportedOperationException();
}

@@ -83,14 +83,14 @@ public class SpanTermQuery extends SpanQuery {
@Override
public Spans getSpans(final AtomicReaderContext context) throws IOException {
final IndexReader reader = context.reader;
-final DocsAndPositionsEnum postings = reader.termPositionsEnum(reader.getDeletedDocs(),
+final DocsAndPositionsEnum postings = reader.termPositionsEnum(reader.getLiveDocs(),
term.field(),
term.bytes());

if (postings != null) {
return new TermSpans(postings, term);
} else {
-if (reader.termDocsEnum(reader.getDeletedDocs(), term.field(), term.bytes()) != null) {
+if (reader.termDocsEnum(reader.getLiveDocs(), term.field(), term.bytes()) != null) {
// term does exist, but has no positions
throw new IllegalStateException("field \"" + term.field() + "\" was indexed with Field.omitTermFreqAndPositions=true; cannot run SpanTermQuery (term=" + term.text() + ")");
} else {
@@ -18,6 +18,7 @@ package org.apache.lucene.util;
*/

import java.io.IOException;
+import java.util.Arrays;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;

@@ -39,6 +40,7 @@ public final class BitVector implements Cloneable, Bits {
private byte[] bits;
private int size;
private int count;
+private int version;

/** Constructs a vector capable of holding <code>n</code> bits. */
public BitVector(int n) {
@@ -92,8 +94,10 @@ public final class BitVector implements Cloneable, Bits {
return true;
else {
bits[pos] = (byte) (v | flag);
-if (count != -1)
+if (count != -1) {
count++;
+assert count <= size;
+}
return false;
}
}
@@ -107,6 +111,25 @@ public final class BitVector implements Cloneable, Bits {
count = -1;
}

+public final boolean getAndClear(int bit) {
+if (bit >= size) {
+throw new ArrayIndexOutOfBoundsException(bit);
+}
+final int pos = bit >> 3;
+final int v = bits[pos];
+final int flag = 1 << (bit & 7);
+if ((flag & v) == 0) {
+return false;
+} else {
+bits[pos] &= ~flag;
+if (count != -1) {
+count--;
+assert count >= 0;
+}
+return true;
+}
+}
+
/** Returns <code>true</code> if <code>bit</code> is one and
<code>false</code> if it is zero. */
public final boolean get(int bit) {
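Note: getAndClear is new in this change; it clears a bit and reports whether it was previously set, keeping the cached count in sync. In a live-docs world, clearing bit n marks document n deleted. A small usage sketch, assuming only the BitVector API visible in this diff:

import org.apache.lucene.util.BitVector;

class GetAndClearUsage {
  public static void main(String[] args) {
    BitVector liveDocs = new BitVector(8);
    liveDocs.set(3);
    // First call clears the bit and reports it was set ...
    System.out.println(liveDocs.getAndClear(3)); // true
    // ... the second call sees it already cleared.
    System.out.println(liveDocs.getAndClear(3)); // false
    System.out.println(liveDocs.count());        // 0
  }
}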
@@ -133,8 +156,9 @@ public final class BitVector implements Cloneable, Bits {
if (count == -1) {
int c = 0;
int end = bits.length;
-for (int i = 0; i < end; i++)
+for (int i = 0; i < end; i++) {
c += BYTE_COUNTS[bits[i] & 0xFF]; // sum bits per byte
+}
count = c;
}
return count;

@@ -144,8 +168,9 @@ public final class BitVector implements Cloneable, Bits {
public final int getRecomputedCount() {
int c = 0;
int end = bits.length;
-for (int i = 0; i < end; i++)
+for (int i = 0; i < end; i++) {
c += BYTE_COUNTS[bits[i] & 0xFF]; // sum bits per byte
+}
return c;
}

@@ -171,13 +196,21 @@ public final class BitVector implements Cloneable, Bits {
private static String CODEC = "BitVector";

// Version before version tracking was added:
-private final static int VERSION_PRE = -1;
+public final static int VERSION_PRE = -1;

// First version:
-private final static int VERSION_START = 0;
+public final static int VERSION_START = 0;

+// Changed DGaps to encode gaps between cleared bits, not
+// set:
+public final static int VERSION_DGAPS_CLEARED = 1;
+
// Increment version to change it:
-private final static int VERSION_CURRENT = VERSION_START;
+public final static int VERSION_CURRENT = VERSION_DGAPS_CLEARED;

+public int getVersion() {
+return version;
+}
+
/** Writes this vector to the file <code>name</code> in Directory
<code>d</code>, in a format that can be read by the constructor {@link
@@ -188,7 +221,8 @@ public final class BitVector implements Cloneable, Bits {
output.writeInt(-2);
CodecUtil.writeHeader(output, CODEC, VERSION_CURRENT);
if (isSparse()) {
-writeDgaps(output); // sparse bit-set more efficiently saved as d-gaps.
+// sparse bit-set more efficiently saved as d-gaps.
+writeClearedDgaps(output);
} else {
writeBits(output);
}
@@ -196,6 +230,38 @@ public final class BitVector implements Cloneable, Bits {
output.close();
}
}

+/** Invert all bits */
+public void invertAll() {
+if (count != -1) {
+count = size - count;
+}
+if (bits.length > 0) {
+for(int idx=0;idx<bits.length;idx++) {
+bits[idx] = (byte) (~bits[idx]);
+}
+clearUnusedBits();
+}
+}
+
+private void clearUnusedBits() {
+// Take care not to invert the "unused" bits in the
+// last byte:
+if (bits.length > 0) {
+final int lastNBits = size & 7;
+if (lastNBits != 0) {
+final int mask = (1 << lastNBits)-1;
+bits[bits.length-1] &= mask;
+}
+}
+}
+
+/** Set all bits */
+public void setAll() {
+Arrays.fill(bits, (byte) 0xff);
+clearUnusedBits();
+count = size;
+}
+
/** Write as a bit set */
private void writeBits(IndexOutput output) throws IOException {
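Note: invertAll and setAll are the groundwork for representing deletions as live docs: an existing deleted-docs vector (set bit = deleted) can be flipped in place into a live-docs vector (set bit = live), with clearUnusedBits keeping the padding bits of the last byte zero so count() stays correct. A sketch of that conversion (assumed usage, not code from this commit):

import org.apache.lucene.util.BitVector;

class InvertExample {
  // Turns a deleted-docs vector into a live-docs vector for a segment
  // with maxDoc documents.
  static BitVector toLiveDocs(BitVector deletedDocs, int maxDoc) {
    if (deletedDocs == null) {
      // No deletions: every document is live.
      BitVector liveDocs = new BitVector(maxDoc);
      liveDocs.setAll();
      return liveDocs;
    }
    deletedDocs.invertAll(); // count() becomes size() - count()
    return deletedDocs;
  }
}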
@@ -205,19 +271,20 @@ public final class BitVector implements Cloneable, Bits {
}

/** Write as a d-gaps list */
-private void writeDgaps(IndexOutput output) throws IOException {
+private void writeClearedDgaps(IndexOutput output) throws IOException {
output.writeInt(-1); // mark using d-gaps
output.writeInt(size()); // write size
output.writeInt(count()); // write count
int last=0;
-int n = count();
+int numCleared = size()-count();
int m = bits.length;
-for (int i=0; i<m && n>0; i++) {
+for (int i=0; i<m && numCleared>0; i++) {
-if (bits[i]!=0) {
+if (bits[i]!=0xff) {
output.writeVInt(i-last);
output.writeByte(bits[i]);
last = i;
-n -= BYTE_COUNTS[bits[i] & 0xFF];
+numCleared -= (8-BYTE_COUNTS[bits[i] & 0xFF]);
+assert numCleared >= 0;
}
}
}
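Note: with live docs, a sparse vector is one with few *cleared* bits, so the d-gaps list now records gaps between bytes containing cleared bits and the loop stops once size()-count() cleared bits are accounted for; bytes that are entirely 0xff are never written. A small round-trip demo of the new on-disk form (illustrative values; whether write() actually picks the d-gaps encoding for such a tiny vector depends on the isSparse() heuristic):

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BitVector;

class DgapDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    BitVector bv = new BitVector(24);
    bv.setAll();   // all 24 docs live: bits = {0xff, 0xff, 0xff}
    bv.clear(9);   // delete doc 9:     bits = {0xff, 0xfd, 0xff}
    // With one cleared bit the vector is sparse; a d-gaps write would emit
    // the -1 marker, size=24, count=23, then one (gap=1, byte=0xfd) pair.
    bv.write(dir, "liveDocs");
    BitVector read = new BitVector(dir, "liveDocs");
    System.out.println(read.get(9));   // false: doc 9 is deleted
    System.out.println(read.count());  // 23 live docs
  }
}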
@@ -225,12 +292,12 @@ public final class BitVector implements Cloneable, Bits {
/** Indicates if the bit vector is sparse and should be saved as a d-gaps list, or dense, and should be saved as a bit set. */
private boolean isSparse() {

-final int setCount = count();
-if (setCount == 0) {
+final int clearedCount = size() - count();
+if (clearedCount == 0) {
return true;
}

-final int avgGapLength = bits.length / setCount;
+final int avgGapLength = bits.length / clearedCount;

// expected number of bytes for vInt encoding of each gap
final int expectedDGapBytes;
@@ -266,17 +333,21 @@ public final class BitVector implements Cloneable, Bits {

try {
final int firstInt = input.readInt();
-final int version;
if (firstInt == -2) {
// New format, with full header & version:
-version = CodecUtil.checkHeader(input, CODEC, VERSION_START, VERSION_START);
+version = CodecUtil.checkHeader(input, CODEC, VERSION_START, VERSION_CURRENT);
size = input.readInt();
} else {
version = VERSION_PRE;
size = firstInt;
}
if (size == -1) {
-readDgaps(input);
+if (version >= VERSION_DGAPS_CLEARED) {
+readClearedDgaps(input);
+} else {
+readSetDgaps(input);
+}
} else {
readBits(input);
}
@@ -293,7 +364,7 @@ public final class BitVector implements Cloneable, Bits {
}

/** read as a d-gaps list */
-private void readDgaps(IndexInput input) throws IOException {
+private void readSetDgaps(IndexInput input) throws IOException {
size = input.readInt(); // (re)read size
count = input.readInt(); // read count
bits = new byte[(size >> 3) + 1]; // allocate bits

@@ -303,6 +374,24 @@ public final class BitVector implements Cloneable, Bits {
last += input.readVInt();
bits[last] = input.readByte();
n -= BYTE_COUNTS[bits[last] & 0xFF];
+assert n >= 0;
}
}

+/** read as a d-gaps cleared bits list */
+private void readClearedDgaps(IndexInput input) throws IOException {
+size = input.readInt(); // (re)read size
+count = input.readInt(); // read count
+bits = new byte[(size >> 3) + 1]; // allocate bits
+Arrays.fill(bits, (byte) 0xff);
+clearUnusedBits();
+int last=0;
+int numCleared = size()-count();
+while (numCleared>0) {
+last += input.readVInt();
+bits[last] = input.readByte();
+numCleared -= 8-BYTE_COUNTS[bits[last] & 0xFF];
+assert numCleared >= 0;
+}
+}
}
@@ -34,13 +34,16 @@ public final class MultiBits implements Bits {
// length is 1+subs.length (the last entry has the maxDoc):
private final int[] starts;

-public MultiBits(List<Bits> bits, List<Integer> starts) {
+private final boolean defaultValue;
+
+public MultiBits(List<Bits> bits, List<Integer> starts, boolean defaultValue) {
assert starts.size() == 1+bits.size();
this.subs = bits.toArray(Bits.EMPTY_ARRAY);
this.starts = new int[starts.size()];
for(int i=0;i<this.starts.length;i++) {
this.starts[i] = starts.get(i);
}
+this.defaultValue = defaultValue;
}

private boolean checkLength(int reader, int doc) {

@@ -54,7 +57,7 @@ public final class MultiBits implements Bits {
assert reader != -1;
final Bits bits = subs[reader];
if (bits == null) {
-return false;
+return defaultValue;
} else {
assert checkLength(reader, doc);
return bits.get(doc-starts[reader]);
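Note: MultiBits gains a defaultValue because the meaning of a missing per-segment bitset inverted. Under deleted docs, a null sub meant "nothing deleted" (return false); under live docs it means "everything live" (return true). A toy two-segment stand-in for MultiBits showing the same logic (illustrative only; Lucene's MultiBits handles any number of segments via the starts[] array):

class TwoSegmentLiveDocs {
  private final org.apache.lucene.util.Bits seg1, seg2; // either may be null
  private final int seg1MaxDoc;

  TwoSegmentLiveDocs(org.apache.lucene.util.Bits seg1, int seg1MaxDoc,
                     org.apache.lucene.util.Bits seg2) {
    this.seg1 = seg1;
    this.seg2 = seg2;
    this.seg1MaxDoc = seg1MaxDoc;
  }

  boolean get(int doc) {
    final org.apache.lucene.util.Bits sub = doc < seg1MaxDoc ? seg1 : seg2;
    final int subDoc = doc < seg1MaxDoc ? doc : doc - seg1MaxDoc;
    // defaultValue = true: a segment with no deletions stores no bitset at
    // all, yet every one of its documents must read as live.
    return sub == null ? true : sub.get(subDoc);
  }
}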
@@ -133,7 +133,7 @@ public class OpenBitSet extends DocIdSet implements Bits, Cloneable {
* compatibility. This is *not* equal to {@link #cardinality}
*/
public long size() {
return capacity();
}

@Override

@@ -869,7 +869,6 @@ public class OpenBitSet extends DocIdSet implements Bits, Cloneable {
// empty sets from returning 0, which is too common.
return (int)((h>>32) ^ h) + 0x98761234;
}

}

@@ -346,26 +346,26 @@ public class TestExternalCodecs extends LuceneTestCase {
}

@Override
-public DocsEnum docs(Bits skipDocs, DocsEnum reuse) {
-return new RAMDocsEnum(ramField.termToDocs.get(current), skipDocs);
+public DocsEnum docs(Bits liveDocs, DocsEnum reuse) {
+return new RAMDocsEnum(ramField.termToDocs.get(current), liveDocs);
}

@Override
-public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) {
-return new RAMDocsAndPositionsEnum(ramField.termToDocs.get(current), skipDocs);
+public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) {
+return new RAMDocsAndPositionsEnum(ramField.termToDocs.get(current), liveDocs);
}
}

private static class RAMDocsEnum extends DocsEnum {
private final RAMTerm ramTerm;
-private final Bits skipDocs;
+private final Bits liveDocs;
private RAMDoc current;
int upto = -1;
int posUpto = 0;

-public RAMDocsEnum(RAMTerm ramTerm, Bits skipDocs) {
+public RAMDocsEnum(RAMTerm ramTerm, Bits liveDocs) {
this.ramTerm = ramTerm;
-this.skipDocs = skipDocs;
+this.liveDocs = liveDocs;
}

@Override

@@ -383,7 +383,7 @@ public class TestExternalCodecs extends LuceneTestCase {
upto++;
if (upto < ramTerm.docs.size()) {
current = ramTerm.docs.get(upto);
-if (skipDocs == null || !skipDocs.get(current.docID)) {
+if (liveDocs == null || liveDocs.get(current.docID)) {
posUpto = 0;
return current.docID;
}

@@ -406,14 +406,14 @@ public class TestExternalCodecs extends LuceneTestCase {

private static class RAMDocsAndPositionsEnum extends DocsAndPositionsEnum {
private final RAMTerm ramTerm;
-private final Bits skipDocs;
+private final Bits liveDocs;
private RAMDoc current;
int upto = -1;
int posUpto = 0;

-public RAMDocsAndPositionsEnum(RAMTerm ramTerm, Bits skipDocs) {
+public RAMDocsAndPositionsEnum(RAMTerm ramTerm, Bits liveDocs) {
this.ramTerm = ramTerm;
-this.skipDocs = skipDocs;
+this.liveDocs = liveDocs;
}

@Override

@@ -431,7 +431,7 @@ public class TestExternalCodecs extends LuceneTestCase {
upto++;
if (upto < ramTerm.docs.size()) {
current = ramTerm.docs.get(upto);
-if (skipDocs == null || !skipDocs.get(current.docID)) {
+if (liveDocs == null || liveDocs.get(current.docID)) {
posUpto = 0;
return current.docID;
}

@@ -560,6 +560,9 @@ public class TestExternalCodecs extends LuceneTestCase {
r.close();
s.close();

+if (VERBOSE) {
+System.out.println("\nTEST: now delete 2nd doc");
+}
w.deleteDocuments(new Term("id", "44"));
w.optimize();
r = IndexReader.open(w, true);
@@ -73,7 +73,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {

IndexReader reader = writer.getReader();
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader,
-MultiFields.getDeletedDocs(reader),
+MultiFields.getLiveDocs(reader),
"preanalyzed",
new BytesRef("term1"));
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);

@@ -81,7 +81,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
assertEquals(0, termPositions.nextPosition());

termPositions = MultiFields.getTermPositionsEnum(reader,
-MultiFields.getDeletedDocs(reader),
+MultiFields.getLiveDocs(reader),
"preanalyzed",
new BytesRef("term2"));
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);

@@ -90,7 +90,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
assertEquals(3, termPositions.nextPosition());

termPositions = MultiFields.getTermPositionsEnum(reader,
-MultiFields.getDeletedDocs(reader),
+MultiFields.getLiveDocs(reader),
"preanalyzed",
new BytesRef("term3"));
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
@@ -281,10 +281,10 @@ public class TestBackwardsCompatibility extends LuceneTestCase {

_TestUtil.checkIndex(dir);

-final Bits delDocs = MultiFields.getDeletedDocs(reader);
+final Bits liveDocs = MultiFields.getLiveDocs(reader);

for(int i=0;i<35;i++) {
-if (!delDocs.get(i)) {
+if (liveDocs.get(i)) {
Document d = reader.document(i);
List<Fieldable> fields = d.getFields();
if (d.getField("content3") == null) {
@@ -611,6 +611,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
final int N = 10;

for(int pass=0;pass<2;pass++) {
+if (VERBOSE) {
+System.out.println("TEST: pass=" + pass);
+}

boolean useCompoundFile = (pass % 2) != 0;

@@ -631,7 +634,7 @@ public class TestDeletionPolicy extends LuceneTestCase {

for(int i=0;i<N+1;i++) {
if (VERBOSE) {
-System.out.println("\nTEST: cycle i=" + i);
+System.out.println("\nTEST: write i=" + i);
}
conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random))

@@ -692,8 +695,14 @@ public class TestDeletionPolicy extends LuceneTestCase {
int expectedCount = 176;
searcher.close();
for(int i=0;i<N+1;i++) {
+if (VERBOSE) {
+System.out.println("TEST: i=" + i);
+}
try {
IndexReader reader = IndexReader.open(dir, true);
+if (VERBOSE) {
+System.out.println(" got reader=" + reader);
+}

// Work backwards in commits on what the expected
// count should be.

@@ -706,7 +715,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
expectedCount -= 17;
}
}
-assertEquals(expectedCount, hits.length);
+assertEquals("maxDoc=" + searcher.maxDoc() + " numDocs=" + searcher.getIndexReader().numDocs(), expectedCount, hits.length);
searcher.close();
reader.close();
if (i == N) {
@@ -168,13 +168,13 @@ public class TestDirectoryReader extends LuceneTestCase {
TermsEnum te2 = MultiFields.getTerms(mr2, "body").iterator();
te2.seekCeil(new BytesRef("wow"));
DocsEnum td = MultiFields.getTermDocsEnum(mr2,
-MultiFields.getDeletedDocs(mr2),
+MultiFields.getLiveDocs(mr2),
"body",
te2.term());

TermsEnum te3 = MultiFields.getTerms(mr3, "body").iterator();
te3.seekCeil(new BytesRef("wow"));
-td = te3.docs(MultiFields.getDeletedDocs(mr3),
+td = te3.docs(MultiFields.getLiveDocs(mr3),
td);

int ret = 0;
@@ -234,7 +234,7 @@ public class TestDoc extends LuceneTestCase {
out.print("  term=" + field + ":" + tis.term());
out.println("    DF=" + tis.docFreq());

-DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getDeletedDocs(), null);
+DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getLiveDocs(), null);

while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
out.print(" doc=" + positions.docID());
@@ -89,17 +89,17 @@ public class TestDocsAndPositions extends LuceneTestCase {
}

public DocsAndPositionsEnum getDocsAndPositions(IndexReader reader,
-BytesRef bytes, Bits skipDocs) throws IOException {
+BytesRef bytes, Bits liveDocs) throws IOException {
return reader.termPositionsEnum(null, fieldName, bytes);
}

public DocsEnum getDocsEnum(IndexReader reader, BytesRef bytes,
-boolean freqs, Bits skipDocs) throws IOException {
+boolean freqs, Bits liveDocs) throws IOException {
int randInt = random.nextInt(10);
if (randInt == 0) { // once in a while throw in a positions enum
-return getDocsAndPositions(reader, bytes, skipDocs);
+return getDocsAndPositions(reader, bytes, liveDocs);
} else {
-return reader.termDocsEnum(skipDocs, fieldName, bytes);
+return reader.termDocsEnum(liveDocs, fieldName, bytes);
}
}

@@ -131,7 +131,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.close();
SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);

-DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, MultiFields.getDeletedDocs(reader),
+DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader),
"repeated", new BytesRef("repeated"));
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
int freq = termPositions.freq();

@@ -195,7 +195,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.close();
SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);

-DocsAndPositionsEnum termPositions = reader.fields().terms("f1").docsAndPositions(reader.getDeletedDocs(), new BytesRef("a"), null);
+DocsAndPositionsEnum termPositions = reader.fields().terms("f1").docsAndPositions(reader.getLiveDocs(), new BytesRef("a"), null);
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
int freq = termPositions.freq();
assertEquals(3, freq);

@@ -239,18 +239,18 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.close();
SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);

-DocsAndPositionsEnum termPositions = reader.fields().terms("preanalyzed").docsAndPositions(reader.getDeletedDocs(), new BytesRef("term1"), null);
+DocsAndPositionsEnum termPositions = reader.fields().terms("preanalyzed").docsAndPositions(reader.getLiveDocs(), new BytesRef("term1"), null);
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
assertEquals(1, termPositions.freq());
assertEquals(0, termPositions.nextPosition());

-termPositions = reader.fields().terms("preanalyzed").docsAndPositions(reader.getDeletedDocs(), new BytesRef("term2"), null);
+termPositions = reader.fields().terms("preanalyzed").docsAndPositions(reader.getLiveDocs(), new BytesRef("term2"), null);
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
assertEquals(2, termPositions.freq());
assertEquals(1, termPositions.nextPosition());
assertEquals(3, termPositions.nextPosition());

-termPositions = reader.fields().terms("preanalyzed").docsAndPositions(reader.getDeletedDocs(), new BytesRef("term3"), null);
+termPositions = reader.fields().terms("preanalyzed").docsAndPositions(reader.getLiveDocs(), new BytesRef("term3"), null);
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
assertEquals(1, termPositions.freq());
assertEquals(2, termPositions.nextPosition());
@@ -87,8 +87,8 @@ public class TestFilterIndexReader extends LuceneTestCase {
}

@Override
-public DocsAndPositionsEnum docsAndPositions(Bits skipDocs, DocsAndPositionsEnum reuse) throws IOException {
-return new TestPositions(super.docsAndPositions(skipDocs, reuse == null ? null : ((FilterDocsAndPositionsEnum) reuse).in));
+public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse) throws IOException {
+return new TestPositions(super.docsAndPositions(liveDocs, reuse == null ? null : ((FilterDocsAndPositionsEnum) reuse).in));
}
}

@@ -157,7 +157,7 @@ public class TestFilterIndexReader extends LuceneTestCase {

assertEquals(TermsEnum.SeekStatus.FOUND, terms.seekCeil(new BytesRef("one")));

-DocsAndPositionsEnum positions = terms.docsAndPositions(MultiFields.getDeletedDocs(reader),
+DocsAndPositionsEnum positions = terms.docsAndPositions(MultiFields.getLiveDocs(reader),
null);
while (positions.nextDoc() != DocsEnum.NO_MORE_DOCS) {
assertTrue((positions.docID() % 2) == 1);
@@ -310,7 +310,7 @@ public class TestIndexReader extends LuceneTestCase
int expected)
throws IOException {
DocsEnum tdocs = MultiFields.getTermDocsEnum(reader,
-MultiFields.getDeletedDocs(reader),
+MultiFields.getLiveDocs(reader),
term.field(),
new BytesRef(term.text()));
int count = 0;

@@ -849,17 +849,17 @@ public class TestIndexReader extends LuceneTestCase
}

// check deletions
-final Bits delDocs1 = MultiFields.getDeletedDocs(index1);
-final Bits delDocs2 = MultiFields.getDeletedDocs(index2);
+final Bits liveDocs1 = MultiFields.getLiveDocs(index1);
+final Bits liveDocs2 = MultiFields.getLiveDocs(index2);
for (int i = 0; i < index1.maxDoc(); i++) {
assertEquals("Doc " + i + " only deleted in one index.",
-delDocs1 == null || delDocs1.get(i),
-delDocs2 == null || delDocs2.get(i));
+liveDocs1 == null || !liveDocs1.get(i),
+liveDocs2 == null || !liveDocs2.get(i));
}

// check stored fields
for (int i = 0; i < index1.maxDoc(); i++) {
-if (delDocs1 == null || !delDocs1.get(i)) {
+if (liveDocs1 == null || liveDocs1.get(i)) {
Document doc1 = index1.document(i);
Document doc2 = index2.document(i);
List<Fieldable> fieldable1 = doc1.getFields();

@@ -880,15 +880,15 @@ public class TestIndexReader extends LuceneTestCase
FieldsEnum fenum1 = MultiFields.getFields(index1).iterator();
FieldsEnum fenum2 = MultiFields.getFields(index1).iterator();
String field1 = null;
-Bits delDocs = MultiFields.getDeletedDocs(index1);
+Bits liveDocs = MultiFields.getLiveDocs(index1);
while((field1=fenum1.next()) != null) {
assertEquals("Different fields", field1, fenum2.next());
TermsEnum enum1 = fenum1.terms();
TermsEnum enum2 = fenum2.terms();
while(enum1.next() != null) {
assertEquals("Different terms", enum1.term(), enum2.next());
-DocsAndPositionsEnum tp1 = enum1.docsAndPositions(delDocs, null);
-DocsAndPositionsEnum tp2 = enum2.docsAndPositions(delDocs, null);
+DocsAndPositionsEnum tp1 = enum1.docsAndPositions(liveDocs, null);
+DocsAndPositionsEnum tp2 = enum2.docsAndPositions(liveDocs, null);

while(tp1.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
assertTrue(tp2.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
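Note: test assertions flip polarity mechanically: where a test asserted delDocs.get(doc) for a deleted document it now asserts !liveDocs.get(doc), and the null case flips from "nothing deleted" to "everything live". A sketch of a helper capturing the new idiom (the helper name is made up):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.util.Bits;

class DeletionCheck {
  // Returns true if doc is deleted in reader, under the live-docs convention.
  static boolean isDeleted(IndexReader reader, int doc) {
    final Bits liveDocs = MultiFields.getLiveDocs(reader);
    return liveDocs != null && !liveDocs.get(doc); // null liveDocs: no deletions
  }
}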
@ -29,8 +29,8 @@ import org.apache.lucene.util.LuceneTestCase;
|
||||||
import org.apache.lucene.util.Bits;
|
import org.apache.lucene.util.Bits;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Tests cloning multiple types of readers, modifying the deletedDocs and norms
|
* Tests cloning multiple types of readers, modifying the liveDocs and norms
|
||||||
* and verifies copy on write semantics of the deletedDocs and norms is
|
* and verifies copy on write semantics of the liveDocs and norms is
|
||||||
* implemented properly
|
* implemented properly
|
||||||
*/
|
*/
|
||||||
public class TestIndexReaderClone extends LuceneTestCase {
|
public class TestIndexReaderClone extends LuceneTestCase {
|
||||||
|
@ -282,9 +282,9 @@ public class TestIndexReaderClone extends LuceneTestCase {
|
||||||
assertTrue(sim.decodeNormValue(MultiNorms.norms(r1, "field1")[4]) == norm1);
|
assertTrue(sim.decodeNormValue(MultiNorms.norms(r1, "field1")[4]) == norm1);
|
||||||
assertTrue(sim.decodeNormValue(MultiNorms.norms(pr1Clone, "field1")[4]) != norm1);
|
assertTrue(sim.decodeNormValue(MultiNorms.norms(pr1Clone, "field1")[4]) != norm1);
|
||||||
|
|
||||||
final Bits delDocs = MultiFields.getDeletedDocs(r1);
|
final Bits liveDocs = MultiFields.getLiveDocs(r1);
|
||||||
assertTrue(delDocs == null || !delDocs.get(10));
|
assertTrue(liveDocs == null || liveDocs.get(10));
|
||||||
assertTrue(MultiFields.getDeletedDocs(pr1Clone).get(10));
|
assertFalse(MultiFields.getLiveDocs(pr1Clone).get(10));
|
||||||
|
|
||||||
// try to update the original reader, which should throw an exception
|
// try to update the original reader, which should throw an exception
|
||||||
try {
|
try {
|
||||||
|
@@ -318,7 +318,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
     origSegmentReader.deleteDocument(10);
     assertDelDocsRefCountEquals(1, origSegmentReader);
     origSegmentReader.undeleteAll();
-    assertNull(origSegmentReader.deletedDocsRef);
+    assertNull(origSegmentReader.liveDocsRef);
     origSegmentReader.close();
     // need to test norms?
     dir1.close();
@@ -350,10 +350,10 @@ public class TestIndexReaderClone extends LuceneTestCase {

     IndexReader origReader = IndexReader.open(dir1, false);
     SegmentReader origSegmentReader = getOnlySegmentReader(origReader);
-    // deletedDocsRef should be null because nothing has updated yet
-    assertNull(origSegmentReader.deletedDocsRef);
+    // liveDocsRef should be null because nothing has updated yet
+    assertNull(origSegmentReader.liveDocsRef);

-    // we deleted a document, so there is now a deletedDocs bitvector and a
+    // we deleted a document, so there is now a liveDocs bitvector and a
     // reference to it
     origReader.deleteDocument(1);
     assertDelDocsRefCountEquals(1, origSegmentReader);
@@ -363,7 +363,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
     IndexReader clonedReader = (IndexReader) origReader.clone();
     SegmentReader clonedSegmentReader = getOnlySegmentReader(clonedReader);
     assertDelDocsRefCountEquals(2, origSegmentReader);
-    // deleting a document creates a new deletedDocs bitvector, the refs goes to
+    // deleting a document creates a new liveDocs bitvector, the refs goes to
     // 1
     clonedReader.deleteDocument(2);
     assertDelDocsRefCountEquals(1, origSegmentReader);
@@ -371,13 +371,13 @@ public class TestIndexReaderClone extends LuceneTestCase {

     // make sure the deletedocs objects are different (copy
     // on write)
-    assertTrue(origSegmentReader.deletedDocs != clonedSegmentReader.deletedDocs);
+    assertTrue(origSegmentReader.liveDocs != clonedSegmentReader.liveDocs);

     assertDocDeleted(origSegmentReader, clonedSegmentReader, 1);
-    final Bits delDocs = origSegmentReader.getDeletedDocs();
+    final Bits liveDocs = origSegmentReader.getLiveDocs();
-    assertTrue(delDocs == null || !delDocs.get(2)); // doc 2 should not be deleted
+    assertTrue(liveDocs == null || liveDocs.get(2)); // doc 2 should not be deleted
     // in original segmentreader
-    assertTrue(clonedSegmentReader.getDeletedDocs().get(2)); // doc 2 should be deleted in
+    assertFalse(clonedSegmentReader.getLiveDocs().get(2)); // doc 2 should be deleted in
     // cloned segmentreader

     // deleting a doc from the original segmentreader should throw an exception
@@ -419,7 +419,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
     clonedReader.close();

     IndexReader r = IndexReader.open(dir1, false);
-    assertTrue(MultiFields.getDeletedDocs(r).get(1));
+    assertFalse(MultiFields.getLiveDocs(r).get(1));
     r.close();
     dir1.close();
   }
@@ -448,11 +448,11 @@ public class TestIndexReaderClone extends LuceneTestCase {

   private void assertDocDeleted(SegmentReader reader, SegmentReader reader2,
       int doc) {
-    assertEquals(reader.getDeletedDocs().get(doc), reader2.getDeletedDocs().get(doc));
+    assertEquals(reader.getLiveDocs().get(doc), reader2.getLiveDocs().get(doc));
   }

   private void assertDelDocsRefCountEquals(int refCount, SegmentReader reader) {
-    assertEquals(refCount, reader.deletedDocsRef.get());
+    assertEquals(refCount, reader.liveDocsRef.get());
   }

   public void testCloneSubreaders() throws Exception {
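The clone tests above pin down a copy-on-write contract: a clone shares the live-docs bitvector and bumps a reference count, and whichever reader deletes first makes a private copy. A generic sketch of that idiom, with a BitSet and AtomicInteger standing in for Lucene's internal BitVector and ref types (SegmentReader's actual bookkeeping differs in detail):

import java.util.BitSet;
import java.util.concurrent.atomic.AtomicInteger;

class CopyOnWriteBits {
  BitSet bits;           // shared live-docs bits
  AtomicInteger ref;     // how many readers currently share `bits`

  CopyOnWriteBits(BitSet bits) {
    this.bits = bits;
    this.ref = new AtomicInteger(1);
  }

  CopyOnWriteBits cloneShared() {
    ref.incrementAndGet();                 // clone shares the bits: ref goes to 2
    CopyOnWriteBits c = new CopyOnWriteBits(bits);
    c.ref = ref;
    return c;
  }

  void clearLive(int doc) {                // "delete" doc: clear its live bit
    if (ref.get() > 1) {                   // shared? copy first, drop our share
      ref.decrementAndGet();
      bits = (BitSet) bits.clone();
      ref = new AtomicInteger(1);
    }
    bits.clear(doc);
  }
}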
@@ -285,17 +285,17 @@ public class TestIndexReaderDelete extends LuceneTestCase {
     IndexReader r = new SlowMultiReaderWrapper(w.getReader());
     w.close();

-    assertNull(r.getDeletedDocs());
+    assertNull(r.getLiveDocs());
     r.close();

     r = new SlowMultiReaderWrapper(IndexReader.open(dir, false));

-    assertNull(r.getDeletedDocs());
+    assertNull(r.getLiveDocs());
     assertEquals(1, r.deleteDocuments(new Term("f", "doctor")));
-    assertNotNull(r.getDeletedDocs());
+    assertNotNull(r.getLiveDocs());
-    assertTrue(r.getDeletedDocs().get(0));
+    assertFalse(r.getLiveDocs().get(0));
     assertEquals(1, r.deleteDocuments(new Term("f", "who")));
-    assertTrue(r.getDeletedDocs().get(1));
+    assertFalse(r.getLiveDocs().get(1));
     r.close();
     dir.close();
   }
@@ -1115,16 +1115,16 @@ public class TestIndexReaderReopen extends LuceneTestCase {
     SegmentReader sr2 = (SegmentReader) r2.getSequentialSubReaders()[0]; // and reopened IRs

     // At this point they share the same BitVector
-    assertTrue(sr1.deletedDocs==sr2.deletedDocs);
+    assertTrue(sr1.liveDocs==sr2.liveDocs);

     r2.deleteDocument(0);

     // r1 should not see the delete
-    final Bits r1DelDocs = MultiFields.getDeletedDocs(r1);
+    final Bits r1LiveDocs = MultiFields.getLiveDocs(r1);
-    assertFalse(r1DelDocs != null && r1DelDocs.get(0));
+    assertFalse(r1LiveDocs != null && !r1LiveDocs.get(0));

     // Now r2 should have made a private copy of deleted docs:
-    assertTrue(sr1.deletedDocs!=sr2.deletedDocs);
+    assertTrue(sr1.liveDocs!=sr2.liveDocs);

     r1.close();
     r2.close();
@@ -1150,12 +1150,12 @@ public class TestIndexReaderReopen extends LuceneTestCase {
     SegmentReader sr2 = (SegmentReader) rs2[0];

     // At this point they share the same BitVector
-    assertTrue(sr1.deletedDocs==sr2.deletedDocs);
+    assertTrue(sr1.liveDocs==sr2.liveDocs);
-    final BitVector delDocs = sr1.deletedDocs;
+    final BitVector liveDocs = sr1.liveDocs;
     r1.close();

     r2.deleteDocument(0);
-    assertTrue(delDocs==sr2.deletedDocs);
+    assertTrue(liveDocs==sr2.liveDocs);
     r2.close();
     dir.close();
   }
@@ -536,7 +536,7 @@ public class TestIndexWriter extends LuceneTestCase {
       Term t = new Term("field", "a");
       assertEquals(1, reader.docFreq(t));
       DocsEnum td = MultiFields.getTermDocsEnum(reader,
-                                                MultiFields.getDeletedDocs(reader),
+                                                MultiFields.getLiveDocs(reader),
                                                 "field",
                                                 new BytesRef("a"));
       td.nextDoc();
@@ -947,7 +947,7 @@ public class TestIndexWriter extends LuceneTestCase {
     assertEquals(1, hits.length);

     DocsAndPositionsEnum tps = MultiFields.getTermPositionsEnum(s.getIndexReader(),
-                                                                MultiFields.getDeletedDocs(s.getIndexReader()),
+                                                                MultiFields.getLiveDocs(s.getIndexReader()),
                                                                 "field",
                                                                 new BytesRef("a"));

@@ -484,7 +484,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     // Make sure the doc that hit the exception was marked
     // as deleted:
     DocsEnum tdocs = MultiFields.getTermDocsEnum(reader,
-                                                 MultiFields.getDeletedDocs(reader),
+                                                 MultiFields.getLiveDocs(reader),
                                                  t.field(),
                                                  new BytesRef(t.text()));

@@ -624,10 +624,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
       assertEquals(expected, reader.docFreq(new Term("contents", "here")));
       assertEquals(expected, reader.maxDoc());
       int numDel = 0;
-      final Bits delDocs = MultiFields.getDeletedDocs(reader);
-      assertNotNull(delDocs);
+      final Bits liveDocs = MultiFields.getLiveDocs(reader);
+      assertNotNull(liveDocs);
       for(int j=0;j<reader.maxDoc();j++) {
-        if (delDocs.get(j))
+        if (!liveDocs.get(j))
           numDel++;
         else {
           reader.document(j);
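After the inversion, the counting loop in the hunk above derives the number of deletions by testing for a clear live bit. A self-contained sketch of the same idiom against this trunk API (the helper class and method names are illustrative, not from the patch):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.util.Bits;

class CountDeleted {
  static int numDeleted(IndexReader reader) {
    final Bits liveDocs = MultiFields.getLiveDocs(reader);
    if (liveDocs == null) {
      return 0;                        // null means the reader has no deletions
    }
    int numDel = 0;
    for (int doc = 0; doc < reader.maxDoc(); doc++) {
      if (!liveDocs.get(doc)) {        // live bit clear == deleted
        numDel++;
      }
    }
    return numDel;
  }
}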
@@ -653,7 +653,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     assertEquals(expected, reader.docFreq(new Term("contents", "here")));
     assertEquals(expected, reader.maxDoc());
     int numDel = 0;
-    assertNull(MultiFields.getDeletedDocs(reader));
+    assertNull(MultiFields.getLiveDocs(reader));
     for(int j=0;j<reader.maxDoc();j++) {
       reader.document(j);
       reader.getTermFreqVectors(j);
@@ -743,10 +743,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
       assertEquals("i=" + i, expected, reader.docFreq(new Term("contents", "here")));
       assertEquals(expected, reader.maxDoc());
       int numDel = 0;
-      final Bits delDocs = MultiFields.getDeletedDocs(reader);
-      assertNotNull(delDocs);
+      final Bits liveDocs = MultiFields.getLiveDocs(reader);
+      assertNotNull(liveDocs);
       for(int j=0;j<reader.maxDoc();j++) {
-        if (delDocs.get(j))
+        if (!liveDocs.get(j))
           numDel++;
         else {
           reader.document(j);
@@ -771,7 +771,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     expected += 17-NUM_THREAD*NUM_ITER;
     assertEquals(expected, reader.docFreq(new Term("contents", "here")));
     assertEquals(expected, reader.maxDoc());
-    assertNull(MultiFields.getDeletedDocs(reader));
+    assertNull(MultiFields.getLiveDocs(reader));
     for(int j=0;j<reader.maxDoc();j++) {
       reader.document(j);
       reader.getTermFreqVectors(j);
@@ -53,7 +53,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   public static int count(Term t, IndexReader r) throws IOException {
     int count = 0;
     DocsEnum td = MultiFields.getTermDocsEnum(r,
-                                              MultiFields.getDeletedDocs(r),
+                                              MultiFields.getLiveDocs(r),
                                               t.field(), new BytesRef(t.text()));

     if (td != null) {
@@ -203,7 +203,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
       // Quick test to make sure index is not corrupt:
       IndexReader reader = IndexReader.open(dir, true);
       DocsEnum tdocs = MultiFields.getTermDocsEnum(reader,
-                                                   MultiFields.getDeletedDocs(reader),
+                                                   MultiFields.getLiveDocs(reader),
                                                    "field",
                                                    new BytesRef("aaa"));
       int count = 0;
@@ -268,7 +268,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {

     if (success) {
       IndexReader reader = IndexReader.open(dir, true);
-      final Bits delDocs = MultiFields.getDeletedDocs(reader);
+      final Bits liveDocs = MultiFields.getLiveDocs(reader);
       for(int j=0;j<reader.maxDoc();j++) {
-        if (delDocs == null || !delDocs.get(j)) {
+        if (liveDocs == null || liveDocs.get(j)) {
           reader.document(j);
@@ -155,7 +155,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
     IndexReader reader = IndexReader.open(directory, true);

     DocsAndPositionsEnum tp = MultiFields.getTermPositionsEnum(reader,
-                                                               MultiFields.getDeletedDocs(reader),
+                                                               MultiFields.getLiveDocs(reader),
                                                                this.field,
                                                                new BytesRef("b"));

@@ -166,7 +166,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
     }

     tp = MultiFields.getTermPositionsEnum(reader,
-                                          MultiFields.getDeletedDocs(reader),
+                                          MultiFields.getLiveDocs(reader),
                                           this.field,
                                           new BytesRef("a"));

@@ -90,9 +90,9 @@ public class TestMultiFields extends LuceneTestCase {
       w.close();
       //System.out.println("TEST reader=" + reader);

-      Bits delDocs = MultiFields.getDeletedDocs(reader);
+      Bits liveDocs = MultiFields.getLiveDocs(reader);
       for(int delDoc : deleted) {
-        assertTrue(delDocs.get(delDoc));
+        assertFalse(liveDocs.get(delDoc));
       }
       Terms terms2 = MultiFields.getTerms(reader, "field");

@@ -102,7 +102,7 @@ public class TestMultiFields extends LuceneTestCase {
           System.out.println("TEST: seek to term= "+ UnicodeUtil.toHexString(term.utf8ToString()));
         }

-        DocsEnum docsEnum = terms2.docs(delDocs, term, null);
+        DocsEnum docsEnum = terms2.docs(liveDocs, term, null);
         assertNotNull(docsEnum);

         for(int docID : docs.get(term)) {
@@ -121,7 +121,7 @@ public class TestMultiFields extends LuceneTestCase {
   /*
   private void verify(IndexReader r, String term, List<Integer> expected) throws Exception {
     DocsEnum docs = MultiFields.getTermDocsEnum(r,
-                                                MultiFields.getDeletedDocs(r),
+                                                MultiFields.getLiveDocs(r),
                                                 "field",
                                                 new BytesRef(term));

@@ -86,7 +86,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {

     for (int i = 0; i < 2; i++) {
       counter = 0;
-      DocsAndPositionsEnum tp = reader.termPositionsEnum(reader.getDeletedDocs(),
+      DocsAndPositionsEnum tp = reader.termPositionsEnum(reader.getLiveDocs(),
                                                          term.field(),
                                                          new BytesRef(term.text()));

@@ -106,11 +106,11 @@ public class TestNRTThreads extends LuceneTestCase {
           System.out.println("TEST: now warm merged reader=" + reader);
         }
         final int maxDoc = reader.maxDoc();
-        final Bits delDocs = reader.getDeletedDocs();
+        final Bits liveDocs = reader.getLiveDocs();
         int sum = 0;
         final int inc = Math.max(1, maxDoc/50);
         for(int docID=0;docID<maxDoc;docID += inc) {
-          if (delDocs == null || !delDocs.get(docID)) {
+          if (liveDocs == null || liveDocs.get(docID)) {
            final Document doc = reader.document(docID);
            sum += doc.getFields().size();
          }
@@ -80,7 +80,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
     pr.add(ir1);
     pr.add(ir2);

-    Bits delDocs = pr.getDeletedDocs();
+    Bits liveDocs = pr.getLiveDocs();

     FieldsEnum fe = pr.fields().iterator();

@@ -92,31 +92,31 @@ public class TestParallelTermEnum extends LuceneTestCase {
     TermsEnum te = fe.terms();

     assertEquals("brown", te.next().utf8ToString());
-    DocsEnum td = te.docs(delDocs, null);
+    DocsEnum td = te.docs(liveDocs, null);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("fox", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("jumps", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("quick", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("the", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);
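Each block above follows one pattern: advance the TermsEnum to the next term, then pull its postings with the live-docs filter, reusing the previous DocsEnum. A condensed sketch of that loop (the helper class is hypothetical; the TermsEnum.docs(Bits, DocsEnum) signature is the trunk-era one used by these tests):

import java.io.IOException;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.Bits;

class WalkPostings {
  static int countLiveHits(TermsEnum te, Bits liveDocs) throws IOException {
    DocsEnum td = null;
    int hits = 0;
    while (te.next() != null) {
      td = te.docs(liveDocs, td);    // reuse the previous enum when possible
      while (td.nextDoc() != DocsEnum.NO_MORE_DOCS) {
        hits++;                      // only live documents are returned
      }
    }
    return hits;
  }
}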
@@ -127,31 +127,31 @@ public class TestParallelTermEnum extends LuceneTestCase {
     te = fe.terms();

     assertEquals("brown", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("fox", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("jumps", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("quick", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("the", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);
@@ -162,37 +162,37 @@ public class TestParallelTermEnum extends LuceneTestCase {
     te = fe.terms();

     assertEquals("dog", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("fox", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("jumps", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("lazy", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("over", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);

     assertEquals("the", te.next().utf8ToString());
-    td = te.docs(delDocs, td);
+    td = te.docs(liveDocs, td);
     assertTrue(td.nextDoc() != DocsEnum.NO_MORE_DOCS);
     assertEquals(0, td.docID());
     assertEquals(td.nextDoc(), DocsEnum.NO_MORE_DOCS);
@@ -223,7 +223,7 @@ public class TestPayloads extends LuceneTestCase {
     DocsAndPositionsEnum[] tps = new DocsAndPositionsEnum[numTerms];
     for (int i = 0; i < numTerms; i++) {
       tps[i] = MultiFields.getTermPositionsEnum(reader,
-                                                MultiFields.getDeletedDocs(reader),
+                                                MultiFields.getLiveDocs(reader),
                                                 terms[i].field(),
                                                 new BytesRef(terms[i].text()));
     }
@@ -260,7 +260,7 @@ public class TestPayloads extends LuceneTestCase {
      * test lazy skipping
      */
     DocsAndPositionsEnum tp = MultiFields.getTermPositionsEnum(reader,
-                                                               MultiFields.getDeletedDocs(reader),
+                                                               MultiFields.getLiveDocs(reader),
                                                                terms[0].field(),
                                                                new BytesRef(terms[0].text()));
     tp.nextDoc();
@@ -288,7 +288,7 @@ public class TestPayloads extends LuceneTestCase {
      * Test different lengths at skip points
      */
     tp = MultiFields.getTermPositionsEnum(reader,
-                                          MultiFields.getDeletedDocs(reader),
+                                          MultiFields.getLiveDocs(reader),
                                           terms[1].field(),
                                           new BytesRef(terms[1].text()));
     tp.nextDoc();
@@ -331,7 +331,7 @@ public class TestPayloads extends LuceneTestCase {

     reader = IndexReader.open(dir, true);
     tp = MultiFields.getTermPositionsEnum(reader,
-                                          MultiFields.getDeletedDocs(reader),
+                                          MultiFields.getLiveDocs(reader),
                                           fieldName,
                                           new BytesRef(singleTerm));
     tp.nextDoc();
@@ -516,11 +516,11 @@ public class TestPayloads extends LuceneTestCase {
     writer.close();
     IndexReader reader = IndexReader.open(dir, true);
     TermsEnum terms = MultiFields.getFields(reader).terms(field).iterator();
-    Bits delDocs = MultiFields.getDeletedDocs(reader);
+    Bits liveDocs = MultiFields.getLiveDocs(reader);
     DocsAndPositionsEnum tp = null;
     while (terms.next() != null) {
       String termText = terms.term().utf8ToString();
-      tp = terms.docsAndPositions(delDocs, tp);
+      tp = terms.docsAndPositions(liveDocs, tp);
       while(tp.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
         int freq = tp.freq();
         for (int i = 0; i < freq; i++) {
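The last TestPayloads hunk cuts off just inside the per-position loop; the usual continuation of this pattern reads each position and its payload. A sketch under that assumption (hasPayload()/getPayload() are this era's DocsAndPositionsEnum companions to nextPosition(), assumed here rather than shown in the hunk):

import java.io.IOException;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.Bits;

class ReadPayloads {
  static void dumpPayloads(TermsEnum terms, Bits liveDocs) throws IOException {
    DocsAndPositionsEnum tp = null;
    while (terms.next() != null) {
      tp = terms.docsAndPositions(liveDocs, tp);   // live docs filter postings
      if (tp == null) {
        continue;                                  // term has no positions
      }
      while (tp.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
        final int freq = tp.freq();
        for (int i = 0; i < freq; i++) {
          tp.nextPosition();                       // advance to each position
          if (tp.hasPayload()) {
            System.out.println(tp.getPayload().utf8ToString());
          }
        }
      }
    }
  }
}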
@@ -147,7 +147,7 @@ public class TestPerSegmentDeletes extends LuceneTestCase {

     IndexReader r = writer.getReader();
     IndexReader r1 = r.getSequentialSubReaders()[0];
-    printDelDocs(r1.getDeletedDocs());
+    printDelDocs(r1.getLiveDocs());
     int[] docs = toDocsArray(id3, null, r);
     System.out.println("id3 docs:"+Arrays.toString(docs));
     // there shouldn't be any docs for id:3
@@ -98,7 +98,7 @@ public class TestSegmentMerger extends LuceneTestCase {
     assertTrue(DocHelper.numFields(newDoc2) == DocHelper.numFields(doc2) - DocHelper.unstored.size());

     DocsEnum termDocs = MultiFields.getTermDocsEnum(mergedReader,
-                                                    MultiFields.getDeletedDocs(mergedReader),
+                                                    MultiFields.getLiveDocs(mergedReader),
                                                     DocHelper.TEXT_FIELD_2_KEY,
                                                     new BytesRef("field"));
     assertTrue(termDocs != null);
@@ -81,7 +81,7 @@ public class TestSegmentReader extends LuceneTestCase {
     assertTrue(deleteReader != null);
     assertTrue(deleteReader.numDocs() == 1);
     deleteReader.deleteDocument(0);
-    assertTrue(deleteReader.getDeletedDocs().get(0));
+    assertFalse(deleteReader.getLiveDocs().get(0));
     assertTrue(deleteReader.hasDeletions() == true);
     assertTrue(deleteReader.numDocs() == 0);
     deleteReader.close();
@@ -131,13 +131,13 @@ public class TestSegmentReader extends LuceneTestCase {
     }

     DocsEnum termDocs = MultiFields.getTermDocsEnum(reader,
-                                                    MultiFields.getDeletedDocs(reader),
+                                                    MultiFields.getLiveDocs(reader),
                                                     DocHelper.TEXT_FIELD_1_KEY,
                                                     new BytesRef("field"));
     assertTrue(termDocs.nextDoc() != DocsEnum.NO_MORE_DOCS);

     termDocs = MultiFields.getTermDocsEnum(reader,
-                                           MultiFields.getDeletedDocs(reader),
+                                           MultiFields.getLiveDocs(reader),
                                            DocHelper.NO_NORMS_KEY,
                                            new BytesRef(DocHelper.NO_NORMS_TEXT));

@@ -145,7 +145,7 @@ public class TestSegmentReader extends LuceneTestCase {


     DocsAndPositionsEnum positions = MultiFields.getTermPositionsEnum(reader,
-                                                                      MultiFields.getDeletedDocs(reader),
+                                                                      MultiFields.getLiveDocs(reader),
                                                                       DocHelper.TEXT_FIELD_1_KEY,
                                                                       new BytesRef("field"));
     // NOTE: prior rev of this test was failing to first
Some files were not shown because too many files have changed in this diff.