LUCENE-6980: default applyDeletes to true when opening NRT readers

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1725160 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2016-01-18 00:43:44 +00:00
parent 170f0731f5
commit 40d290ee84
76 changed files with 245 additions and 178 deletions

View File

@@ -146,6 +146,9 @@ API Changes
   analysis/common.  TokenTypeSinkFilter was removed (use TypeTokenFilter instead).
   TokenRangeSinkFilter was removed.  (Shai Erera, Uwe Schindler)
 
+* LUCENE-6980: Default applyAllDeletes to true when opening
+  near-real-time readers  (Mike McCandless)
+
 Optimizations
 
 * LUCENE-6951: Improve GeoPointInPolygonQuery using point orientation based
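For orientation, here is a minimal before/after sketch of the call-site change this entry describes. The class name and setup below are illustrative, not part of the patch, and StandardAnalyzer stands in for whatever analyzer an application actually uses:

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;

public class NrtOpenExample {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    writer.addDocument(new Document());

    // Before: DirectoryReader r = DirectoryReader.open(writer, true);
    // After: the one-argument overload applies all buffered deletes by default.
    DirectoryReader r = DirectoryReader.open(writer);
    System.out.println("numDocs=" + r.numDocs());

    r.close();
    writer.close();
    dir.close();
  }
}
```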

View File

@@ -1381,7 +1381,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
         .setOpenMode(OpenMode.APPEND));
     writer.addDocument(new Document());
-    DirectoryReader r = DirectoryReader.open(writer, true);
+    DirectoryReader r = DirectoryReader.open(writer);
     writer.commit();
     r.close();
     writer.forceMerge(1);

View File

@@ -60,7 +60,7 @@ public class NearRealtimeReaderTask extends PerfTask {
     }
     long t = System.currentTimeMillis();
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     runData.setIndexReader(r);
     // Transfer our reference to runData
     r.decRef();

View File

@@ -114,7 +114,7 @@ public class TestAutoPrefixTerms extends LuceneTestCase {
     }
     if (VERBOSE) System.out.println("\nTEST: now done");
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     List<String> sortedTerms = new ArrayList<>(terms);
     Collections.sort(sortedTerms);
@@ -242,7 +242,7 @@ public class TestAutoPrefixTerms extends LuceneTestCase {
       w.forceMerge(1);
     }
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     List<Integer> sortedTerms = new ArrayList<>(terms);
     Collections.sort(sortedTerms);
@@ -365,7 +365,7 @@ public class TestAutoPrefixTerms extends LuceneTestCase {
       w.forceMerge(1);
     }
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     List<String> sortedTerms = new ArrayList<>(terms);
     Collections.sort(sortedTerms);
@@ -475,7 +475,7 @@ public class TestAutoPrefixTerms extends LuceneTestCase {
       w.forceMerge(1);
     }
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     Terms terms = MultiFields.getTerms(r, "field");
     if (VERBOSE) {
       System.out.println("\nTEST: now intersect");

View File

@@ -249,7 +249,7 @@ public class TestOrdsBlockTree extends BasePostingsFormatTestCase {
       w.addDocument(doc);
     }
     w.forceMerge(1);
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     TermsEnum te = MultiFields.getTerms(r, "field").iterator();
     if (VERBOSE) {
@@ -299,7 +299,7 @@ public class TestOrdsBlockTree extends BasePostingsFormatTestCase {
       w.addDocument(doc);
     }
     w.forceMerge(1);
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     TermsEnum te = MultiFields.getTerms(r, "field").iterator();
     BytesRef term;
@@ -337,7 +337,7 @@ public class TestOrdsBlockTree extends BasePostingsFormatTestCase {
       }
     }
     w.forceMerge(1);
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     TermsEnum te = MultiFields.getTerms(r, "body").iterator();
     for(int i=0;i<30;i++) {

View File

@@ -67,6 +67,23 @@ public abstract class DirectoryReader extends BaseCompositeReader<LeafReader> {
    * Open a near real time IndexReader from the {@link org.apache.lucene.index.IndexWriter}.
    *
    * @param writer The IndexWriter to open from
+   * @return The new IndexReader
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   *
+   * @see #openIfChanged(DirectoryReader,IndexWriter,boolean)
+   *
+   * @lucene.experimental
+   */
+  public static DirectoryReader open(final IndexWriter writer) throws IOException {
+    return open(writer, true);
+  }
+
+  /**
+   * Expert: open a near real time IndexReader from the {@link org.apache.lucene.index.IndexWriter},
+   * controlling whether past deletions should be applied.
+   *
+   * @param writer The IndexWriter to open from
    * @param applyAllDeletes If true, all buffered deletes will
    *   be applied (made visible) in the returned reader.  If
    *   false, the deletes are not applied but remain buffered
@@ -74,11 +91,8 @@ public abstract class DirectoryReader extends BaseCompositeReader<LeafReader> {
    *   future.  Applying deletes can be costly, so if your app
    *   can tolerate deleted documents being returned you might
    *   gain some performance by passing false.
-   * @return The new IndexReader
-   * @throws CorruptIndexException if the index is corrupt
-   * @throws IOException if there is a low-level IO error
    *
-   * @see #openIfChanged(DirectoryReader,IndexWriter,boolean)
+   * @see #open(IndexWriter)
    *
    * @lucene.experimental
    */
@@ -187,6 +201,21 @@ public abstract class DirectoryReader extends BaseCompositeReader<LeafReader> {
    *
    * @param writer The IndexWriter to open from
    *
+   * @throws IOException if there is a low-level IO error
+   *
+   * @lucene.experimental
+   */
+  public static DirectoryReader openIfChanged(DirectoryReader oldReader, IndexWriter writer) throws IOException {
+    return openIfChanged(oldReader, writer, true);
+  }
+
+  /**
+   * Expert: Opens a new reader, if there are any changes, controlling whether past deletions should be applied.
+   *
+   * @see #openIfChanged(DirectoryReader,IndexWriter)
+   *
+   * @param writer The IndexWriter to open from
+   *
    * @param applyAllDeletes If true, all buffered deletes will
    *   be applied (made visible) in the returned reader.  If
    *   false, the deletes are not applied but remain buffered
@@ -368,7 +397,7 @@ public abstract class DirectoryReader extends BaseCompositeReader<LeafReader> {
    *
    * <p>If instead this reader is a near real-time reader
    * (ie, obtained by a call to {@link
-   * DirectoryReader#open(IndexWriter,boolean)}, or by calling {@link #openIfChanged}
+   * DirectoryReader#open(IndexWriter)}, or by calling {@link #openIfChanged}
    * on a near real-time reader), then this method checks if
    * either a new commit has occurred, or any new
    * uncommitted changes have taken place via the writer.
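Putting the two new convenience overloads together, here is a minimal NRT refresh sketch (the helper class name is hypothetical; the writer is assumed already open):

```java
import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;

class NrtRefreshSketch {
  /** Returns the freshest NRT reader, closing the old one if it was replaced. */
  static DirectoryReader refresh(DirectoryReader reader, IndexWriter writer) throws IOException {
    // The new two-argument openIfChanged defaults applyAllDeletes to true,
    // mirroring DirectoryReader.open(IndexWriter); it returns null if nothing changed.
    DirectoryReader newReader = DirectoryReader.openIfChanged(reader, writer);
    if (newReader != null) {
      reader.close();
      return newReader;
    }
    return reader;
  }
}
```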

View File

@@ -37,7 +37,7 @@ import org.apache.lucene.util.IOUtils;
  point-in-time view of an index.  Any changes made to the index
  via {@link IndexWriter} will not be visible until a new
  {@code IndexReader} is opened.  It's best to use {@link
- DirectoryReader#open(IndexWriter,boolean)} to obtain an
+ DirectoryReader#open(IndexWriter)} to obtain an
  {@code IndexReader}, if your {@link IndexWriter} is
  in-process.  When you need to re-open to see changes to the
  index, it's best to use {@link DirectoryReader#openIfChanged(DirectoryReader)}

View File

@@ -1319,7 +1319,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
   /** Expert: attempts to delete by document ID, as long as
    *  the provided reader is a near-real-time reader (from {@link
-   *  DirectoryReader#open(IndexWriter,boolean)}).  If the
+   *  DirectoryReader#open(IndexWriter)}).  If the
    *  provided reader is an NRT reader obtained from this
    *  writer, and its segment has not been merged away, then
    *  the delete succeeds and this method returns true; else, it
@@ -4478,7 +4478,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
     }
   }
 
-  /** If {@link DirectoryReader#open(IndexWriter,boolean)} has
+  /** If {@link DirectoryReader#open(IndexWriter)} has
    *  been called (ie, this writer is in near real-time
    *  mode), then after a merge completes, this class can be
    *  invoked to warm the reader on the newly merged
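A short sketch of the expert flow the tryDeleteDocument javadoc above describes, under the assumption that the NRT reader comes from this same writer (the helper class name is hypothetical):

```java
import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;

class TryDeleteSketch {
  static boolean tryDelete(IndexWriter writer, int docID) throws IOException {
    // tryDeleteDocument only succeeds for an NRT reader obtained from this
    // same writer whose segment has not been merged away; else it returns false.
    DirectoryReader reader = DirectoryReader.open(writer);
    try {
      return writer.tryDeleteDocument(reader, docID);
    } finally {
      reader.close();
    }
  }
}
```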

View File

@@ -283,11 +283,11 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
   /** By default, IndexWriter does not pool the
    *  SegmentReaders it must open for deletions and
    *  merging, unless a near-real-time reader has been
-   *  obtained by calling {@link DirectoryReader#open(IndexWriter, boolean)}.
+   *  obtained by calling {@link DirectoryReader#open(IndexWriter)}.
    *  This method lets you enable pooling without getting a
    *  near-real-time reader.  NOTE: if you set this to
    *  false, IndexWriter will still pool readers once
-   *  {@link DirectoryReader#open(IndexWriter, boolean)} is called.
+   *  {@link DirectoryReader#open(IndexWriter)} is called.
    *
    *  <p>Only takes effect when IndexWriter is first created. */
   public IndexWriterConfig setReaderPooling(boolean readerPooling) {
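For illustration, enabling pooling up front might look like the following sketch (class name hypothetical; StandardAnalyzer stands in for the application's analyzer):

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;

class ReaderPoolingSketch {
  static IndexWriterConfig pooledConfig() {
    // Pool SegmentReaders even before any NRT reader is opened via
    // DirectoryReader.open(IndexWriter); per the javadoc above, this only
    // takes effect when the IndexWriter is first created.
    return new IndexWriterConfig(new StandardAnalyzer()).setReaderPooling(true);
  }
}
```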

View File

@@ -373,7 +373,7 @@ public class LiveIndexWriterConfig {
   /**
    * Returns {@code true} if {@link IndexWriter} should pool readers even if
-   * {@link DirectoryReader#open(IndexWriter, boolean)} has not been called.
+   * {@link DirectoryReader#open(IndexWriter)} has not been called.
    */
   public boolean getReaderPooling() {
     return readerPooling;

View File

@@ -41,6 +41,19 @@ public final class ReaderManager extends ReferenceManager<DirectoryReader> {
    *
    * @param writer
    *          the IndexWriter to open the IndexReader from.
+   *
+   * @throws IOException If there is a low-level I/O error
+   */
+  public ReaderManager(IndexWriter writer) throws IOException {
+    this(writer, true);
+  }
+
+  /**
+   * Expert: creates and returns a new ReaderManager from the given
+   * {@link IndexWriter}, controlling whether past deletions should be applied.
+   *
+   * @param writer
+   *          the IndexWriter to open the IndexReader from.
    * @param applyAllDeletes
    *          If <code>true</code>, all buffered deletes will be applied (made
    *          visible) in the {@link IndexSearcher} / {@link DirectoryReader}.
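Usage of the new convenience constructor might look like this sketch (helper class name hypothetical; every acquire needs a matching release):

```java
import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.ReaderManager;

class ReaderManagerSketch {
  static int numDocs(IndexWriter writer) throws IOException {
    ReaderManager mgr = new ReaderManager(writer); // applyAllDeletes now defaults to true
    try {
      DirectoryReader reader = mgr.acquire();
      try {
        return reader.numDocs();
      } finally {
        mgr.release(reader); // release what you acquire
      }
    } finally {
      mgr.close();
    }
  }
}
```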

View File

@@ -63,7 +63,7 @@ import org.apache.lucene.util.ThreadInterruptedException;
  * to obtain a new reader and
  * then create a new IndexSearcher from that.  Also, for
  * low-latency turnaround it's best to use a near-real-time
- * reader ({@link DirectoryReader#open(IndexWriter,boolean)}).
+ * reader ({@link DirectoryReader#open(IndexWriter)}).
  * Once you have a new {@link IndexReader}, it's relatively
  * cheap to create a new IndexSearcher from it.
  *
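A minimal sketch of the pattern the paragraph above recommends (helper class name hypothetical; the caller owns, and must eventually close, the underlying reader):

```java
import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;

class NrtSearcherSketch {
  static IndexSearcher newNrtSearcher(IndexWriter writer) throws IOException {
    // Opening the NRT reader is the expensive step; wrapping it
    // in an IndexSearcher is relatively cheap.
    return new IndexSearcher(DirectoryReader.open(writer));
  }
}
```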

View File

@@ -65,6 +65,23 @@ public final class SearcherManager extends ReferenceManager<IndexSearcher> {
    *
    * @param writer
    *          the IndexWriter to open the IndexReader from.
+   * @param searcherFactory
+   *          An optional {@link SearcherFactory}. Pass <code>null</code> if you
+   *          don't require the searcher to be warmed before going live or other
+   *          custom behavior.
+   *
+   * @throws IOException if there is a low-level I/O error
+   */
+  public SearcherManager(IndexWriter writer, SearcherFactory searcherFactory) throws IOException {
+    this(writer, true, searcherFactory);
+  }
+
+  /**
+   * Expert: creates and returns a new SearcherManager from the given
+   * {@link IndexWriter}, controlling whether past deletions should be applied.
+   *
+   * @param writer
+   *          the IndexWriter to open the IndexReader from.
    * @param applyAllDeletes
    *          If <code>true</code>, all buffered deletes will be applied (made
    *          visible) in the {@link IndexSearcher} / {@link DirectoryReader}.
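And a matching usage sketch for the new SearcherManager constructor (helper class name hypothetical; a null SearcherFactory means no custom warming):

```java
import java.io.IOException;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.SearcherManager;

class SearcherManagerSketch {
  static int totalHits(IndexWriter writer) throws IOException {
    // New convenience constructor: buffered deletes are applied by default.
    SearcherManager mgr = new SearcherManager(writer, null);
    try {
      IndexSearcher searcher = mgr.acquire();
      try {
        return searcher.search(new MatchAllDocsQuery(), 1).totalHits;
      } finally {
        mgr.release(searcher);
      }
    } finally {
      mgr.close();
    }
  }
}
```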

View File

@@ -96,7 +96,7 @@ public class TestExternalCodecs extends LuceneTestCase {
     }
     w.deleteDocuments(new Term("id", "77"));
 
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
 
     assertEquals(NUM_DOCS-1, r.numDocs());
     IndexSearcher s = newSearcher(r);
@@ -116,7 +116,7 @@ public class TestExternalCodecs extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("\nTEST: now open reader");
     }
-    r = DirectoryReader.open(w, true);
+    r = DirectoryReader.open(w);
     assertEquals(NUM_DOCS-2, r.maxDoc());
     assertEquals(NUM_DOCS-2, r.numDocs());
     s = newSearcher(r);

View File

@@ -53,7 +53,7 @@ public class TestBlockPostingsFormat extends BasePostingsFormatTestCase {
     }
     w.forceMerge(1);
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.leaves().size());
     FieldReader field = (FieldReader) r.leaves().get(0).reader().fields().terms("field");
     // We should see exactly two blocks: one root block (prefix empty string) and one block for z* terms (prefix z):

View File

@@ -539,7 +539,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
       }
       out.close();
       w.forceMerge(1);
-      DirectoryReader r = DirectoryReader.open(w, false);
+      DirectoryReader r = DirectoryReader.open(w);
       w.close();
       SegmentReader sr = getOnlySegmentReader(r);
       assertEquals(maxDoc, sr.maxDoc());
@@ -589,7 +589,7 @@ public class TestLucene54DocValuesFormat extends BaseCompressingDocValuesFormatT
       }
       out.close();
       w.forceMerge(1);
-      DirectoryReader r = DirectoryReader.open(w, false);
+      DirectoryReader r = DirectoryReader.open(w);
       w.close();
       SegmentReader sr = getOnlySegmentReader(r);
       assertEquals(maxDoc, sr.maxDoc());

View File

@@ -124,7 +124,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       writer.close();
       reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -166,7 +166,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       writer.close();
       reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -195,7 +195,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     final boolean isNRT = random().nextBoolean();
     final DirectoryReader reader1;
     if (isNRT) {
-      reader1 = DirectoryReader.open(writer, true);
+      reader1 = DirectoryReader.open(writer);
     } else {
       writer.commit();
       reader1 = DirectoryReader.open(dir);
@@ -249,7 +249,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       writer.close();
       reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -293,7 +293,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       writer.close();
       reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -567,7 +567,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       writer.commit();
       reader = DirectoryReader.open(dir);
     } else {
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
     }
     assertEquals(1, reader.leaves().size());
@@ -625,7 +625,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     final boolean isNRT = random.nextBoolean();
     DirectoryReader reader;
     if (isNRT) {
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
     } else {
       writer.commit();
       reader = DirectoryReader.open(dir);
@@ -892,7 +892,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
         if (random.nextDouble() < 0.1) { // reopen NRT reader (apply updates), on average once every 10 updates
           if (reader == null) {
             // System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
-            reader = DirectoryReader.open(writer, true);
+            reader = DirectoryReader.open(writer);
           } else {
             // System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
             DirectoryReader r2 = DirectoryReader.openIfChanged(reader, writer, true);
@@ -972,7 +972,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
       Term t = new Term("id", "doc" + doc);
       long value = random().nextLong();
       writer.updateDocValues(t, new BinaryDocValuesField("f", toBytes(value)), new BinaryDocValuesField("cf", toBytes(value*2)));
-      DirectoryReader reader = DirectoryReader.open(writer, true);
+      DirectoryReader reader = DirectoryReader.open(writer);
       for (LeafReaderContext context : reader.leaves()) {
         LeafReader r = context.reader();
         BinaryDocValues fbdv = r.getBinaryDocValues("f");
@@ -1291,7 +1291,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
     writer = new IndexWriter(cachingDir, conf);
     writer.updateBinaryDocValue(new Term("id", "doc-0"), "val", toBytes(100L));
-    DirectoryReader reader = DirectoryReader.open(writer, true); // flush
+    DirectoryReader reader = DirectoryReader.open(writer); // flush
     assertEquals(0, cachingDir.listCachedFiles().length);
     IOUtils.close(reader, writer, cachingDir);

View File

@@ -140,7 +140,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
 
       // start with empty commit:
       w.commit();
-      mgr = new ReaderManager(new ParallelLeafDirectoryReader(DirectoryReader.open(w, true)));
+      mgr = new ReaderManager(new ParallelLeafDirectoryReader(DirectoryReader.open(w)));
     }
 
     protected abstract IndexWriterConfig getIndexWriterConfig() throws IOException;

View File

@@ -611,7 +611,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
     List<IndexCommit> commits = DirectoryReader.listCommits(dir);
     assertEquals(1, commits.size());
     w.addDocument(doc);
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(2, r.numDocs());
     IndexReader r2 = DirectoryReader.openIfChanged(r, commits.get(0));

View File

@@ -40,7 +40,7 @@ public class TestDocValues extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
     iw.addDocument(new Document());
-    DirectoryReader dr = DirectoryReader.open(iw, true);
+    DirectoryReader dr = DirectoryReader.open(iw);
     LeafReader r = getOnlySegmentReader(dr);
 
     // ok
@@ -65,7 +65,7 @@ public class TestDocValues extends LuceneTestCase {
     Document doc = new Document();
     doc.add(new StringField("foo", "bar", Field.Store.NO));
     iw.addDocument(doc);
-    DirectoryReader dr = DirectoryReader.open(iw, true);
+    DirectoryReader dr = DirectoryReader.open(iw);
     LeafReader r = getOnlySegmentReader(dr);
 
     // errors
@@ -108,7 +108,7 @@ public class TestDocValues extends LuceneTestCase {
     Document doc = new Document();
     doc.add(new NumericDocValuesField("foo", 3));
     iw.addDocument(doc);
-    DirectoryReader dr = DirectoryReader.open(iw, true);
+    DirectoryReader dr = DirectoryReader.open(iw);
     LeafReader r = getOnlySegmentReader(dr);
 
     // ok
@@ -144,7 +144,7 @@ public class TestDocValues extends LuceneTestCase {
     Document doc = new Document();
     doc.add(new BinaryDocValuesField("foo", new BytesRef("bar")));
     iw.addDocument(doc);
-    DirectoryReader dr = DirectoryReader.open(iw, true);
+    DirectoryReader dr = DirectoryReader.open(iw);
     LeafReader r = getOnlySegmentReader(dr);
 
     // ok
@@ -183,7 +183,7 @@ public class TestDocValues extends LuceneTestCase {
     Document doc = new Document();
     doc.add(new SortedDocValuesField("foo", new BytesRef("bar")));
     iw.addDocument(doc);
-    DirectoryReader dr = DirectoryReader.open(iw, true);
+    DirectoryReader dr = DirectoryReader.open(iw);
     LeafReader r = getOnlySegmentReader(dr);
 
     // ok
@@ -216,7 +216,7 @@ public class TestDocValues extends LuceneTestCase {
     Document doc = new Document();
     doc.add(new SortedSetDocValuesField("foo", new BytesRef("bar")));
     iw.addDocument(doc);
-    DirectoryReader dr = DirectoryReader.open(iw, true);
+    DirectoryReader dr = DirectoryReader.open(iw);
     LeafReader r = getOnlySegmentReader(dr);
 
     // ok
@@ -255,7 +255,7 @@ public class TestDocValues extends LuceneTestCase {
     Document doc = new Document();
     doc.add(new SortedNumericDocValuesField("foo", 3));
     iw.addDocument(doc);
-    DirectoryReader dr = DirectoryReader.open(iw, true);
+    DirectoryReader dr = DirectoryReader.open(iw);
     LeafReader r = getOnlySegmentReader(dr);
 
     // ok

View File

@@ -57,7 +57,7 @@ public class TestFilterDirectoryReader extends LuceneTestCase {
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
     w.addDocument(new Document());
 
-    DirectoryReader reader = DirectoryReader.open(w, false);
+    DirectoryReader reader = DirectoryReader.open(w);
     DirectoryReader wrapped = new DummyFilterDirectoryReader(reader);
 
     // Calling close() on the original reader and wrapped reader should only close

View File

@@ -1669,7 +1669,7 @@ public class TestIndexWriter extends LuceneTestCase {
       w.addDocument(doc);
     }
     w.commit();
-    DirectoryReader.open(w, true).close();
+    DirectoryReader.open(w).close();
     w.deleteAll();
     w.commit();
@@ -2643,7 +2643,7 @@ public class TestIndexWriter extends LuceneTestCase {
     w.commit();
 
     // newly opened NRT reader should see gen=1 segments file
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.getIndexCommit().getGeneration());
     assertEquals("segments_1", r.getIndexCommit().getSegmentsFileName());
@@ -2683,7 +2683,7 @@ public class TestIndexWriter extends LuceneTestCase {
     w.commit();
     w.addDocument(new Document());
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     w.commit();
 
     // commit even with no other changes counts as a "change" that NRT reader reopen will see:
@@ -2702,7 +2702,7 @@ public class TestIndexWriter extends LuceneTestCase {
     IndexWriter w = new IndexWriter(dir, iwc);
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     Map<String,String> m = new HashMap<>();
     m.put("foo", "bar");
     w.setCommitData(m);
@@ -2721,7 +2721,7 @@ public class TestIndexWriter extends LuceneTestCase {
     IndexWriter w = new IndexWriter(dir, iwc);
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     Map<String,String> m = new HashMap<>();
     m.put("foo", "bar");
     w.setCommitData(m);
@@ -2741,7 +2741,7 @@ public class TestIndexWriter extends LuceneTestCase {
     w.addDocument(new Document());
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     w.commit();
 
     assertFalse(r.isCurrent());

View File

@@ -1311,8 +1311,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     }
 
     // First one triggers, but does not reflect, the merge:
-    DirectoryReader.open(w, true).close();
-    IndexReader r =DirectoryReader.open(w, true);
+    DirectoryReader.open(w).close();
+    IndexReader r = DirectoryReader.open(w);
     assertEquals(1, r.leaves().size());
     r.close();
@@ -1371,7 +1371,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
       w.deleteDocuments(new Term("id", ""+i));
     }
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(0, r.leaves().size());
     assertEquals(0, r.maxDoc());
     r.close();
@@ -1406,7 +1406,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     w.forceMerge(1);
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.leaves().size());
     r.close();

View File

@@ -33,7 +33,7 @@ public class TestIndexWriterDeleteByQuery extends LuceneTestCase {
     // Norms are disabled:
     doc.add(newStringField("field", "foo", Field.Store.NO));
     w.addDocument(doc);
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     FieldInfo fi = MultiFields.getMergedFieldInfos(r).fieldInfo("field");
     assertNotNull(fi);
     assertFalse(fi.hasNorms());

View File

@@ -1795,7 +1795,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     } catch (UnsupportedOperationException expected) {
       // expected
     }
-    DirectoryReader ir = DirectoryReader.open(iw, false);
+    DirectoryReader ir = DirectoryReader.open(iw);
     assertEquals(1, ir.numDocs());
     assertEquals("sometext", ir.document(0).get("field1"));
     ir.close();
@@ -2203,7 +2203,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     iw.addDocument(doc);
 
     // pool readers
-    DirectoryReader r = DirectoryReader.open(iw, false);
+    DirectoryReader r = DirectoryReader.open(iw);
 
     // sometimes sneak in a pending commit: we don't want to leak a file handle to that segments_N
     if (random().nextBoolean()) {
@@ -2274,7 +2274,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     iw.addDocument(doc);
 
     // pool readers
-    DirectoryReader r = DirectoryReader.open(iw, false);
+    DirectoryReader r = DirectoryReader.open(iw);
 
     // sometimes sneak in a pending commit: we don't want to leak a file handle to that segments_N
     if (random().nextBoolean()) {
@@ -2351,7 +2351,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
         w.addDocument(doc);
         if (random().nextInt(10) == 7) {
           // Flush new segment:
-          DirectoryReader.open(w, true).close();
+          DirectoryReader.open(w).close();
         }
       } catch (AlreadyClosedException ace) {
         // OK: e.g. CMS hit the exc in BG thread and closed the writer

View File

@@ -38,7 +38,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.addDocument(new Document());
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.maxDoc());
     w.close();
@@ -91,7 +91,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
     w.addDocument(new Document());
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     w.rollback();
 
     IndexWriterConfig iwc = newIndexWriterConfig();
@@ -120,7 +120,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.commit();
     w.addDocument(new Document());
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(2, r.maxDoc());
     w.close();
@@ -148,7 +148,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.addDocument(new Document());
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.maxDoc());
 
     // Add another doc
@@ -184,7 +184,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     // an NRT reader, the commit before that NRT reader must exist
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     int nrtReaderNumDocs = 0;
     int writerNumDocs = 0;
@@ -287,7 +287,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
           writerNumDocs = nrtReaderNumDocs;
           liveIDs = new HashSet<>(nrtLiveIDs);
           r.close();
-          r = DirectoryReader.open(w, true);
+          r = DirectoryReader.open(w);
         }
         break;
@@ -318,7 +318,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     doc.add(newStringField("f0", "foo", Field.Store.NO));
     w.addDocument(doc);
 
    DirectoryReader r = DirectoryReader.open(w, true);
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.maxDoc());
 
     doc = new Document();
@@ -351,7 +351,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.addDocument(new Document());
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.maxDoc());
     w.close();
@@ -374,7 +374,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.addDocument(new Document());
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.maxDoc());
     IndexCommit commit = r.getIndexCommit();
     r.close();
@@ -397,7 +397,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.addDocument(new Document());
     w.commit();
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(1, r.maxDoc());
     w.addDocument(new Document());
@@ -412,7 +412,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     assertEquals(1, w.numDocs());
     r.close();
 
-    DirectoryReader r3 = DirectoryReader.open(w, true);
+    DirectoryReader r3 = DirectoryReader.open(w);
     assertEquals(1, r3.numDocs());
 
     w.addDocument(new Document());
@@ -432,7 +432,7 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.commit();
     w.addDocument(new Document());
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(2, r.maxDoc());
     w.rollback();
@@ -470,11 +470,11 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
     w.commit();
     w.addDocument(new Document());
 
-    DirectoryReader r = DirectoryReader.open(w, true);
+    DirectoryReader r = DirectoryReader.open(w);
     assertEquals(2, r.maxDoc());
     w.addDocument(new Document());
 
-    DirectoryReader r2 = DirectoryReader.open(w, true);
+    DirectoryReader r2 = DirectoryReader.open(w);
     assertEquals(3, r2.maxDoc());
     IOUtils.close(r2, w);

View File

@@ -1139,7 +1139,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
       Document doc = new Document();
       doc.add(newStringField("id", ""+i, Field.Store.NO));
       w.addDocument(doc);
-      IndexReader r = DirectoryReader.open(w, true);
+      IndexReader r = DirectoryReader.open(w);
       // Make sure segment count never exceeds 100:
       assertTrue(r.leaves().size() < 100);
       r.close();
@@ -1156,7 +1156,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     w.addDocument(new Document());
 
     // Pull NRT reader; it has 1 segment:
-    DirectoryReader r1 = DirectoryReader.open(w, true);
+    DirectoryReader r1 = DirectoryReader.open(w);
     assertEquals(1, r1.leaves().size());
     w.addDocument(new Document());
     w.commit();

View File

@@ -82,7 +82,7 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
     startingGun.countDown();
     startDone.await();
 
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     assertEquals(2, r.numDocs());
     int numSegments = r.leaves().size();
     // 1 segment if the threads ran sequentially, else 2:
@@ -95,7 +95,7 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
     finalGun.countDown();
     threads[1].join();
 
-    r = DirectoryReader.open(w, true);
+    r = DirectoryReader.open(w);
     assertEquals(4, r.numDocs());
     // Both threads should have shared a single thread state since they did not try to index concurrently:
     assertEquals(1+numSegments, r.leaves().size());
@@ -118,7 +118,7 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
       this.w = w;
       this.maxThreadCountPerIter = maxThreadCountPerIter;
      this.indexingCount = indexingCount;
-      r = DirectoryReader.open(w, true);
+      r = DirectoryReader.open(w);
       assertEquals(0, r.leaves().size());
       setNextIterThreadCount();
     }

View File

@@ -53,7 +53,7 @@ public class TestMaxPosition extends LuceneTestCase {
     }
 
     // Document should not be visible:
-    IndexReader r = DirectoryReader.open(iw, true);
+    IndexReader r = DirectoryReader.open(iw);
     assertEquals(0, r.numDocs());
     r.close();
@@ -79,7 +79,7 @@ public class TestMaxPosition extends LuceneTestCase {
     iw.addDocument(doc);
 
     // Document should be visible:
-    IndexReader r = DirectoryReader.open(iw, true);
+    IndexReader r = DirectoryReader.open(iw);
     assertEquals(1, r.numDocs());
     PostingsEnum postings = MultiFields.getTermPositionsEnum(r, "foo", new BytesRef("foo"));

View File

@@ -54,7 +54,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
     final boolean isNRT = random.nextBoolean();
     DirectoryReader reader;
     if (isNRT) {
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
     } else {
       writer.commit();
       reader = DirectoryReader.open(dir);
@@ -223,7 +223,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
         if (random.nextDouble() < 0.1) { // reopen NRT reader (apply updates), on average once every 10 updates
           if (reader == null) {
             // System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
-            reader = DirectoryReader.open(writer, true);
+            reader = DirectoryReader.open(writer);
           } else {
             // System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
             DirectoryReader r2 = DirectoryReader.openIfChanged(reader, writer, true);
@@ -310,7 +310,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
       long value = random().nextLong();
       writer.updateDocValues(t, new BinaryDocValuesField("f", TestBinaryDocValuesUpdates.toBytes(value)),
          new NumericDocValuesField("cf", value*2));
-      DirectoryReader reader = DirectoryReader.open(writer, true);
+      DirectoryReader reader = DirectoryReader.open(writer);
       for (LeafReaderContext context : reader.leaves()) {
         LeafReader r = context.reader();
         BinaryDocValues fbdv = r.getBinaryDocValues("f");

View File

@@ -56,7 +56,7 @@ public class TestMultiTermsEnum extends LuceneTestCase {
     document.add(new StringField("deleted", "0", Field.Store.YES));
     writer.addDocument(document);
 
-    DirectoryReader reader = DirectoryReader.open(writer, true);
+    DirectoryReader reader = DirectoryReader.open(writer);
     writer.close();
 
     Directory directory2 = new RAMDirectory();

View File

@@ -46,7 +46,7 @@ public class TestNRTThreads extends ThreadedIndexingAndSearchingTestCase {
     boolean anyOpenDelFiles = false;
 
-    DirectoryReader r = DirectoryReader.open(writer, true);
+    DirectoryReader r = DirectoryReader.open(writer);
 
     while (System.currentTimeMillis() < stopTime && !failed.get()) {
       if (random().nextBoolean()) {
@@ -73,7 +73,7 @@ public class TestNRTThreads extends ThreadedIndexingAndSearchingTestCase {
         if (VERBOSE) {
           System.out.println("TEST: now open");
         }
-        r = DirectoryReader.open(writer, true);
+        r = DirectoryReader.open(writer);
       }
       if (VERBOSE) {
         System.out.println("TEST: got new reader=" + r);

View File

@@ -109,7 +109,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.close();
       reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -152,7 +152,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.close();
      reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -182,7 +182,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     final boolean isNRT = random().nextBoolean();
     final DirectoryReader reader1;
     if (isNRT) {
-      reader1 = DirectoryReader.open(writer, true);
+      reader1 = DirectoryReader.open(writer);
     } else {
       writer.commit();
       reader1 = DirectoryReader.open(dir);
@@ -235,7 +235,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.close();
       reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -280,7 +280,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.close();
       reader = DirectoryReader.open(dir);
     } else { // NRT
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
       writer.close();
     }
@@ -561,7 +561,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       writer.commit();
       reader = DirectoryReader.open(dir);
     } else {
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
     }
     assertEquals(1, reader.leaves().size());
@@ -621,7 +621,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     final boolean isNRT = random.nextBoolean();
     DirectoryReader reader;
     if (isNRT) {
-      reader = DirectoryReader.open(writer, true);
+      reader = DirectoryReader.open(writer);
     } else {
       writer.commit();
       reader = DirectoryReader.open(dir);
@@ -976,7 +976,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
         if (random.nextDouble() < 0.1) { // reopen NRT reader (apply updates), on average once every 10 updates
          if (reader == null) {
             // System.out.println("[" + Thread.currentThread().getName() + "] open NRT");
-            reader = DirectoryReader.open(writer, true);
+            reader = DirectoryReader.open(writer);
           } else {
             // System.out.println("[" + Thread.currentThread().getName() + "] reopen NRT");
             DirectoryReader r2 = DirectoryReader.openIfChanged(reader, writer, true);
@@ -1057,7 +1057,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
       Term t = new Term("id", "doc" + doc);
       long value = random().nextLong();
       writer.updateDocValues(t, new NumericDocValuesField("f", value), new NumericDocValuesField("cf", value*2));
-      DirectoryReader reader = DirectoryReader.open(writer, true);
+      DirectoryReader reader = DirectoryReader.open(writer);
       for (LeafReaderContext context : reader.leaves()) {
         LeafReader r = context.reader();
         NumericDocValues fndv = r.getNumericDocValues("f");
@@ -1381,7 +1381,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
     writer = new IndexWriter(cachingDir, conf);
     writer.updateNumericDocValue(new Term("id", "doc-0"), "val", 100L);
-    DirectoryReader reader = DirectoryReader.open(writer, true); // flush
+    DirectoryReader reader = DirectoryReader.open(writer); // flush
     assertEquals(0, cachingDir.listCachedFiles().length);
     IOUtils.close(reader, writer, cachingDir);

View File

@@ -218,7 +218,7 @@ public class TestRollingUpdates extends LuceneTestCase {
       writer.updateDocument(new Term("id", br), doc);
       if (random().nextInt(3) == 0) {
         if (open == null) {
-          open = DirectoryReader.open(writer, true);
+          open = DirectoryReader.open(writer);
         }
         DirectoryReader reader = DirectoryReader.openIfChanged(open);
         if (reader != null) {

View File

@@ -671,7 +671,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
     } catch (IllegalArgumentException iae) {
       // expected
     }
-    IndexReader r = DirectoryReader.open(iw, true);
+    IndexReader r = DirectoryReader.open(iw);
 
     // Make sure the exc didn't lose our first document:
     assertEquals(1, r.numDocs());

View File

@@ -229,7 +229,7 @@ public class TestTieredMergePolicy extends BaseMergePolicyTestCase {
       doc.add(newTextField("id", random().nextLong() + "" + random().nextLong(), Field.Store.YES));
       w.addDocument(doc);
     }
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
 
     // Make sure TMP always merged equal-number-of-docs segments:
    for(LeafReaderContext ctx : r.leaves()) {

View File

@@ -57,7 +57,7 @@ public class TestTragicIndexWriterDeadlock extends LuceneTestCase {
       }
     };
    commitThread.start();
-    final DirectoryReader r0 = DirectoryReader.open(w, true);
+    final DirectoryReader r0 = DirectoryReader.open(w);
     Thread nrtThread = new Thread() {
       @Override
       public void run() {

View File

@@ -90,7 +90,7 @@ public class TestTryDelete extends LuceneTestCase
     long result;
     if (random().nextBoolean()) {
-      IndexReader r = DirectoryReader.open(writer, true);
+      IndexReader r = DirectoryReader.open(writer);
       result = mgrWriter.tryDeleteDocument(r, 0);
       r.close();
     } else {
@@ -135,8 +135,7 @@ public class TestTryDelete extends LuceneTestCase
     assertEquals(1, topDocs.totalHits);
 
     TrackingIndexWriter mgrWriter = new TrackingIndexWriter(writer);
-    long result = mgrWriter.tryDeleteDocument(DirectoryReader.open(writer,
-                                                                   true), 0);
+    long result = mgrWriter.tryDeleteDocument(DirectoryReader.open(writer), 0);
 
     assertEquals(1, result);

View File

@@ -450,7 +450,7 @@ public class TestBooleanQuery extends LuceneTestCase {
     Document doc = new Document();
     doc.add(newTextField("field", "a b c d", Field.Store.NO));
     w.addDocument(doc);
-    IndexReader r = DirectoryReader.open(w, true);
+    IndexReader r = DirectoryReader.open(w);
     IndexSearcher s = newSearcher(r);
     BooleanQuery.Builder bq = new BooleanQuery.Builder();
     bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);

View File

@@ -53,7 +53,7 @@ public class TestElevationComparator extends LuceneTestCase {
     writer.addDocument(adoc(new String[] {"id", "y", "title", "boosted boosted", "str_s","y"}));
     writer.addDocument(adoc(new String[] {"id", "z", "title", "boosted boosted boosted","str_s", "z"}));
 
-    IndexReader r = DirectoryReader.open(writer, true);
+    IndexReader r = DirectoryReader.open(writer);
     writer.close();
 
     IndexSearcher searcher = newSearcher(r);

View File

@@ -48,7 +48,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
     addDoc("one", iw, 1f);
     addDoc("two", iw, 20f);
     addDoc("three four", iw, 300f);
-    IndexReader ir = DirectoryReader.open(iw, true);
+    IndexReader ir = DirectoryReader.open(iw);
 
     IndexSearcher is = newSearcher(ir);
     ScoreDoc[] hits;
@@ -75,7 +75,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
     iw.deleteDocuments(new Term("key", "one"));
     ir.close();
-    ir = DirectoryReader.open(iw, true);
+    ir = DirectoryReader.open(iw);
     is = newSearcher(ir);
 
     hits = is.search(new MatchAllDocsQuery(), 1000).scoreDocs;

View File

@@ -48,7 +48,7 @@ public class TestMatchNoDocsQuery extends LuceneTestCase {
 addDoc("one", iw);
 addDoc("two", iw);
 addDoc("three four", iw);
-IndexReader ir = DirectoryReader.open(iw, true);
+IndexReader ir = DirectoryReader.open(iw);
 IndexSearcher is = newSearcher(ir);
 ScoreDoc[] hits;

View File

@@ -45,7 +45,7 @@ public class TestMultiPhraseEnum extends LuceneTestCase {
 doc.add(new TextField("field", "foo bar", Field.Store.NO));
 writer.addDocument(doc);
-DirectoryReader ir = DirectoryReader.open(writer, true);
+DirectoryReader ir = DirectoryReader.open(writer);
 writer.close();
 PostingsEnum p1 = getOnlySegmentReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS);
@@ -87,7 +87,7 @@ public class TestMultiPhraseEnum extends LuceneTestCase {
 writer.addDocument(doc);
 writer.forceMerge(1);
-DirectoryReader ir = DirectoryReader.open(writer, true);
+DirectoryReader ir = DirectoryReader.open(writer);
 writer.close();
 PostingsEnum p1 = getOnlySegmentReader(ir).postings(new Term("field", "foo"), PostingsEnum.POSITIONS);

View File

@@ -470,7 +470,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
 Document doc = new Document();
 doc.add(new TextField("field", new CannedTokenStream(INCR_0_DOC_TOKENS)));
 writer.addDocument(doc);
-IndexReader r = DirectoryReader.open(writer,false);
+IndexReader r = DirectoryReader.open(writer);
 writer.close();
 IndexSearcher s = newSearcher(r);

View File

@@ -227,7 +227,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
 dir.allIndexInputs.clear();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 Term aaa = new Term("content", "aaa");
 Term bbb = new Term("content", "bbb");
@@ -235,7 +235,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
 dir.tweakBufferSizes();
 writer.deleteDocuments(new Term("id", "0"));
-reader = DirectoryReader.open(writer, true);
+reader = DirectoryReader.open(writer);
 IndexSearcher searcher = newSearcher(reader);
 ScoreDoc[] hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
 dir.tweakBufferSizes();
@@ -245,7 +245,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
 dir.tweakBufferSizes();
 writer.deleteDocuments(new Term("id", "4"));
-reader = DirectoryReader.open(writer, true);
+reader = DirectoryReader.open(writer);
 searcher = newSearcher(reader);
 hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;

View File

@@ -57,7 +57,7 @@ public class TestFileSwitchDirectory extends BaseDirectoryTestCase {
 setMergePolicy(newLogMergePolicy(false)).setCodec(TestUtil.getDefaultCodec()).setUseCompoundFile(false)
 );
 TestIndexWriterReader.createIndexNoClose(true, "ram", writer);
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 assertEquals(100, reader.maxDoc());
 writer.commit();
 // we should see only fdx,fdt files here

View File

@@ -71,7 +71,7 @@ public class TestNRTCachingDirectory extends BaseDirectoryTestCase {
 w.addDocument(doc);
 if (random().nextInt(20) == 17) {
 if (r == null) {
-r = DirectoryReader.open(w.w, false);
+r = DirectoryReader.open(w.w);
 } else {
 final DirectoryReader r2 = DirectoryReader.openIfChanged(r);
 if (r2 != null) {

View File

@@ -326,7 +326,7 @@ public class TestFSTs extends LuceneTestCase {
 writer.addDocument(doc);
 docCount++;
 }
-IndexReader r = DirectoryReader.open(writer, true);
+IndexReader r = DirectoryReader.open(writer);
 writer.close();
 final PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton();

View File

@@ -114,7 +114,7 @@ public class DistanceFacetsExample implements Closeable {
 writer.addDocument(doc);
 // Open near-real-time searcher
-searcher = new IndexSearcher(DirectoryReader.open(writer, true));
+searcher = new IndexSearcher(DirectoryReader.open(writer));
 writer.close();
 }

View File

@@ -74,7 +74,7 @@ public class RangeFacetsExample implements Closeable {
 }
 // Open near-real-time searcher
-searcher = new IndexSearcher(DirectoryReader.open(indexWriter, true));
+searcher = new IndexSearcher(DirectoryReader.open(indexWriter));
 indexWriter.close();
 }

View File

@@ -63,6 +63,12 @@ public class SearcherTaxonomyManager extends ReferenceManager<SearcherTaxonomyMa
 /** Creates near-real-time searcher and taxonomy reader
  * from the corresponding writers. */
+public SearcherTaxonomyManager(IndexWriter writer, SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter) throws IOException {
+  this(writer, true, searcherFactory, taxoWriter);
+}
+
+/** Expert: creates near-real-time searcher and taxonomy reader
+ * from the corresponding writers, controlling whether deletes should be applied. */
 public SearcherTaxonomyManager(IndexWriter writer, boolean applyAllDeletes, SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter) throws IOException {
 if (searcherFactory == null) {
 searcherFactory = new SearcherFactory();
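
The convenience constructor added above delegates to the expert variant with applyAllDeletes=true. A short usage sketch (assuming an IndexWriter named indexWriter and a DirectoryTaxonomyWriter named taxoWriter already exist; passing null lets the null-check in this hunk install the default SearcherFactory):

SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(indexWriter, null, taxoWriter);
SearcherTaxonomyManager.SearcherAndTaxonomy sat = mgr.acquire();
try {
  TopDocs hits = sat.searcher.search(new MatchAllDocsQuery(), 10);
  // ... use sat.taxonomyReader to resolve facet ordinals ...
} finally {
  mgr.release(sat);
}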

View File

@@ -217,7 +217,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
 /** Open the {@link DirectoryReader} from this {@link
  * IndexWriter}. */
 protected DirectoryReader openIndexReader(IndexWriter writer) throws IOException {
-return DirectoryReader.open(writer, false);
+return DirectoryReader.open(writer);
 }
 /**

View File

@@ -52,7 +52,7 @@ public class TestCachedOrdinalsReader extends FacetTestCase {
 doc.add(new FacetField("A", "2"));
 writer.addDocument(config.build(taxoWriter, doc));
-final DirectoryReader reader = DirectoryReader.open(writer, true);
+final DirectoryReader reader = DirectoryReader.open(writer);
 final CachedOrdinalsReader ordsReader = new CachedOrdinalsReader(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME));
 Thread[] threads = new Thread[3];
 for (int i = 0; i < threads.length; i++) {

View File

@@ -530,7 +530,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 FacetsCollector sfc = new FacetsCollector();
@@ -558,7 +558,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 FacetsCollector sfc = new FacetsCollector();
@@ -586,7 +586,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
 doc.add(new FacetField("b", "1"));
 iw.addDocument(config.build(taxoWriter, doc));
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 final FacetsCollector sfc = new FacetsCollector();
@@ -615,7 +615,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 FacetsCollector sfc = new FacetsCollector();

View File

@@ -265,7 +265,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 FacetsCollector fc = new FacetsCollector(true);
@@ -296,7 +296,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 FacetsCollector sfc = new FacetsCollector();
@@ -323,7 +323,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 ValueSource valueSource = new ValueSource() {
@@ -377,7 +377,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 ValueSource valueSource = new LongFieldSource("price");
@@ -408,7 +408,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
 iw.addDocument(config.build(taxoWriter, doc));
 }
-DirectoryReader r = DirectoryReader.open(iw, true);
+DirectoryReader r = DirectoryReader.open(iw);
 DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
 FacetsCollector fc = new FacetsCollector(true);

View File

@@ -1137,7 +1137,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
 doc.add(new StoredField("body", "foo"));
 iw.addDocument(doc);
-IndexReader ir = DirectoryReader.open(iw.w, true);
+IndexReader ir = DirectoryReader.open(iw.w);
 iw.close();
 IndexSearcher searcher = new IndexSearcher(ir);

View File

@@ -74,7 +74,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 writer.addDocument(doc);
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 int docId = 0;
 FieldQuery fieldQuery = highlighter.getFieldQuery( new TermQuery(new Term("field", "foo")), reader );
 String[] bestFragments = highlighter.getBestFragments(fieldQuery, reader, docId, "field", 54, 1);
@@ -104,7 +104,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 writer.addDocument(doc);
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 int docId = 0;
 FieldQuery fieldQuery = highlighter.getFieldQuery( new CustomScoreQuery(new TermQuery(new Term("field", "foo"))), reader );
 String[] bestFragments = highlighter.getBestFragments(fieldQuery, reader, docId, "field", 54, 1);
@@ -133,7 +133,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 doc.add(text);
 writer.addDocument(doc);
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 int docId = 0;
 String field = "text";
 {
@@ -179,7 +179,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 doc.add(noLongTermField);
 writer.addDocument(doc);
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 int docId = 0;
 String field = "no_long_term";
 {
@@ -295,7 +295,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 doc.add( new Field( "text", text.toString().trim(), type ) );
 writer.addDocument(doc);
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 // This mimics what some query parsers do to <highlight words together>
 BooleanQuery.Builder terms = new BooleanQuery.Builder();
@@ -346,7 +346,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 query.add(new Term("field", "very"));
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 IndexSearcher searcher = newSearcher(reader);
 TopDocs hits = searcher.search(query, 10);
 assertEquals(2, hits.totalHits);
@@ -493,7 +493,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 ScoreOrderFragmentsBuilder fragmentsBuilder = new ScoreOrderFragmentsBuilder();
 fragmentsBuilder.setDiscreteMultiValueHighlighting( true );
-IndexReader reader = DirectoryReader.open(writer, true );
+IndexReader reader = DirectoryReader.open(writer);
 String[] preTags = new String[] { "<b>" };
 String[] postTags = new String[] { "</b>" };
 Encoder encoder = new DefaultEncoder();
@@ -547,7 +547,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 writer.addDocument(doc);
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 int docId = 0;
 // query1: match
@@ -601,7 +601,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 BaseFragmentsBuilder fragmentsBuilder = new SimpleFragmentsBuilder();
 fragmentsBuilder.setDiscreteMultiValueHighlighting(true);
 FastVectorHighlighter highlighter = new FastVectorHighlighter(true, true, fragListBuilder, fragmentsBuilder);
-IndexReader reader = DirectoryReader.open(writer, true);
+IndexReader reader = DirectoryReader.open(writer);
 int docId = 0;
 // Phrase that spans a field value
@@ -690,7 +690,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
 FastVectorHighlighter highlighter = new FastVectorHighlighter();
 FragListBuilder fragListBuilder = new SimpleFragListBuilder();
 FragmentsBuilder fragmentsBuilder = new ScoreOrderFragmentsBuilder();
-IndexReader reader = DirectoryReader.open( writer, true );
+IndexReader reader = DirectoryReader.open(writer);
 String[] preTags = new String[] { "<b>" };
 String[] postTags = new String[] { "</b>" };
 Encoder encoder = new DefaultEncoder();

View File

@@ -128,7 +128,7 @@ public class TestBlockJoin extends LuceneTestCase {
 w.addDocuments(docs);
 w.commit();
-IndexReader r = DirectoryReader.open(w, random().nextBoolean());
+IndexReader r = DirectoryReader.open(w);
 w.close();
 IndexSearcher s = new IndexSearcher(r);
 BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));

View File

@@ -208,7 +208,7 @@ public class TestBlockJoinSorting extends LuceneTestCase {
 w.addDocuments(docs);
 w.commit();
-IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(w.w, false));
+IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(w.w));
 w.close();
 BitSetProducer parentFilter = new QueryBitSetProducer(new TermQuery(new Term("__type", "parent")));
 CheckJoinIndex.check(searcher.getIndexReader(), parentFilter);

View File

@@ -66,7 +66,7 @@ public class TestBlockJoinValidation extends LuceneTestCase {
 indexWriter.addDocuments(segmentDocs);
 indexWriter.commit();
 }
-indexReader = DirectoryReader.open(indexWriter, random().nextBoolean());
+indexReader = DirectoryReader.open(indexWriter);
 indexWriter.close();
 indexSearcher = new IndexSearcher(indexReader);
 parentsFilter = new QueryBitSetProducer(new WildcardQuery(new Term("parent", "*")));

View File

@@ -439,10 +439,10 @@ public class TestDocTermOrds extends LuceneTestCase {
 doc.add(newStringField("foo", "car", Field.Store.NO));
 iw.addDocument(doc);
-DirectoryReader r1 = DirectoryReader.open(iw, true);
+DirectoryReader r1 = DirectoryReader.open(iw);
 iw.deleteDocuments(new Term("foo", "baz"));
-DirectoryReader r2 = DirectoryReader.open(iw, true);
+DirectoryReader r2 = DirectoryReader.open(iw);
 FieldCache.DEFAULT.getDocTermOrds(getOnlySegmentReader(r2), "foo", null);

View File

@@ -1002,7 +1002,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
 doc.add(newStringField("t", "1", Field.Store.NO));
 w.addDocument(doc);
-IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w, true),
+IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w),
 Collections.singletonMap("f", Type.SORTED));
 w.close();
 IndexSearcher s = newSearcher(r);
@@ -1063,7 +1063,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
 w.commit();
 }
-IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w, true),
+IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w),
 Collections.singletonMap("id", Type.INTEGER));
 w.close();
 Query q = new TermQuery(new Term("body", "text"));

View File

@@ -183,7 +183,7 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
 DirectoryReader r;
 try {
-r = DirectoryReader.open(w, true);
+r = DirectoryReader.open(w);
 } catch (IllegalArgumentException iae) {
 if (iae.getMessage().indexOf("is too large") == -1) {
 throw iae;
@@ -269,7 +269,7 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase {
 w.addDocument(doc);
 }
-DirectoryReader r = DirectoryReader.open(w, true);
+DirectoryReader r = DirectoryReader.open(w);
 w.close();
 LeafReader ar = SlowCompositeReaderWrapper.wrap(r);

View File

@@ -68,7 +68,7 @@ public class TestFieldCacheWithThreads extends LuceneTestCase {
 }
 w.forceMerge(1);
-final IndexReader r = DirectoryReader.open(w, true);
+final IndexReader r = DirectoryReader.open(w);
 w.close();
 assertEquals(1, r.leaves().size());

View File

@@ -82,7 +82,7 @@ public class TestFunctionRangeQuery extends FunctionTestSetup {
 try {
 writer.deleteDocuments(new FunctionRangeQuery(valueSource, 3, 3, true, true));//delete the one with #3
 assert writer.hasDeletions();
-try (IndexReader indexReader2 = DirectoryReader.open(writer, true)) {//applyAllDeletes
+try (IndexReader indexReader2 = DirectoryReader.open(writer)) {
 IndexSearcher indexSearcher2 = new IndexSearcher(indexReader2);
 TopDocs topDocs = indexSearcher2.search(new FunctionRangeQuery(valueSource, 3, 4, true, true), N_DOCS);
 expectScores(topDocs.scoreDocs, 4);//missing #3 because it's deleted
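
The //applyAllDeletes comment dropped above is now implicit in the API: the one-argument overload already returns a reader that reflects the writer's buffered deletes. The same pattern in isolation (a sketch, assuming writer holds an uncommitted delete):

try (IndexReader reader = DirectoryReader.open(writer)) { // deletes applied by default
  IndexSearcher searcher = new IndexSearcher(reader);
  // queries against searcher no longer match the deleted document
}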

View File

@@ -127,7 +127,7 @@ public class PayloadHelper {
 doc.add(new TextField(NO_PAYLOAD_FIELD, English.intToEnglish(i), Field.Store.YES));
 writer.addDocument(doc);
 }
-reader = DirectoryReader.open(writer, true);
+reader = DirectoryReader.open(writer);
 writer.close();
 IndexSearcher searcher = LuceneTestCase.newSearcher(reader);

View File

@@ -1329,7 +1329,7 @@ public class TestQPHelper extends LuceneTestCase {
 Document doc = new Document();
 doc.add(newTextField("field", "", Field.Store.NO));
 w.addDocument(doc);
-IndexReader r = DirectoryReader.open(w, true);
+IndexReader r = DirectoryReader.open(w);
 IndexSearcher s = newSearcher(r);
 Query q = new StandardQueryParser(new CannedAnalyzer()).parse("\"a\"", "field");

View File

@@ -1092,7 +1092,7 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
 Document doc = new Document();
 doc.add(newTextField("field", "the wizard of ozzy", Field.Store.NO));
 w.addDocument(doc);
-IndexReader r = DirectoryReader.open(w, true);
+IndexReader r = DirectoryReader.open(w);
 w.close();
 IndexSearcher s = newSearcher(r);

View File

@@ -85,7 +85,7 @@ public class HttpReplicatorTest extends ReplicatorTestCase {
 IndexWriterConfig conf = newIndexWriterConfig(null);
 conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
 writer = new IndexWriter(serverIndexDir, conf);
-reader = DirectoryReader.open(writer, false);
+reader = DirectoryReader.open(writer);
 }
 @Override

View File

@@ -602,7 +602,7 @@ public abstract class BaseGeoPointTestCase extends LuceneTestCase {
 if (random().nextBoolean()) {
 w.forceMerge(1);
 }
-final IndexReader r = DirectoryReader.open(w, true);
+final IndexReader r = DirectoryReader.open(w);
 w.close();
 // We can't wrap with "exotic" readers because the BKD query must see the BKDDVFormat:

View File

@@ -339,7 +339,7 @@ public class FreeTextSuggester extends Lookup implements Accountable {
 writer.addDocument(doc);
 count++;
 }
-reader = DirectoryReader.open(writer, false);
+reader = DirectoryReader.open(writer);
 Terms terms = MultiFields.getTerms(reader, "body");
 if (terms == null) {

View File

@@ -223,7 +223,7 @@ public class TestSuggestField extends LuceneTestCase {
 iw.deleteDocuments(new Term("str_field", "delete"));
-DirectoryReader reader = DirectoryReader.open(iw, true);
+DirectoryReader reader = DirectoryReader.open(iw);
 SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
 PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
 TopSuggestDocs suggest = indexSearcher.suggest(query, numLive);
@@ -285,7 +285,7 @@ public class TestSuggestField extends LuceneTestCase {
 iw.deleteDocuments(new Term("delete", "delete"));
-DirectoryReader reader = DirectoryReader.open(iw, true);
+DirectoryReader reader = DirectoryReader.open(iw);
 SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
 PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
 TopSuggestDocs suggest = indexSearcher.suggest(query, num);
@@ -315,7 +315,7 @@ public class TestSuggestField extends LuceneTestCase {
 iw.deleteDocuments(DimensionalRangeQuery.new1DIntRange("weight_fld", 2, true, null, false));
-DirectoryReader reader = DirectoryReader.open(iw, true);
+DirectoryReader reader = DirectoryReader.open(iw);
 SuggestIndexSearcher indexSearcher = new SuggestIndexSearcher(reader);
 PrefixCompletionQuery query = new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
 TopSuggestDocs suggest = indexSearcher.suggest(query, 1);

View File

@@ -300,7 +300,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
 oneDoc.add(customField);
 oneDoc.add(new NumericDocValuesField("field", 5));
 iw.addDocument(oneDoc);
-LeafReader oneDocReader = getOnlySegmentReader(DirectoryReader.open(iw, true));
+LeafReader oneDocReader = getOnlySegmentReader(DirectoryReader.open(iw));
 iw.close();
 // now feed to codec apis manually

View File

@@ -567,7 +567,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
 Document doc = new Document();
 doc.add(new StringField("foo", "bar", Field.Store.NO));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 // sugar method (FREQS)
 PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
@@ -625,7 +625,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
 ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
 doc.add(new Field("foo", "bar bar", ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 // sugar method (FREQS)
 PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
@@ -703,7 +703,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
 Document doc = new Document();
 doc.add(new TextField("foo", "bar bar", Field.Store.NO));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 // sugar method (FREQS)
 PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
@@ -883,7 +883,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
 ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
 doc.add(new Field("foo", "bar bar", ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 // sugar method (FREQS)
 PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
@@ -1068,7 +1068,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
 token2.setPayload(new BytesRef("pay2"));
 doc.add(new TextField("foo", new CannedTokenStream(token1, token2)));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 // sugar method (FREQS)
 PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));
@@ -1254,7 +1254,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
 ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
 doc.add(new Field("foo", new CannedTokenStream(token1, token2), ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 // sugar method (FREQS)
 PostingsEnum postings = getOnlySegmentReader(reader).postings(new Term("foo", "bar"));

View File

@@ -780,7 +780,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
 iw.addDocument(doc);
 }
-DirectoryReader reader = DirectoryReader.open(iw, true);
+DirectoryReader reader = DirectoryReader.open(iw);
 // mix up fields explicitly
 if (random().nextBoolean()) {
 reader = new MismatchedDirectoryReader(reader, random());
@@ -799,7 +799,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
 iw.addIndexes(dirs);
 iw.forceMerge(1);
-LeafReader ir = getOnlySegmentReader(DirectoryReader.open(iw, true));
+LeafReader ir = getOnlySegmentReader(DirectoryReader.open(iw));
 for (int i = 0; i < ir.maxDoc(); i++) {
 Document doc = ir.document(i);
 assertEquals(10, doc.getFields().size());

View File

@@ -767,7 +767,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
 ft.setStoreTermVectors(true);
 doc.add(new Field("foo", "bar bar", ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
 TermsEnum termsEnum = terms.iterator();
@@ -848,7 +848,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
 ft.setStoreTermVectorPositions(true);
 doc.add(new Field("foo", "bar bar", ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
 TermsEnum termsEnum = terms.iterator();
@@ -1027,7 +1027,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
 ft.setStoreTermVectorOffsets(true);
 doc.add(new Field("foo", "bar bar", ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
 TermsEnum termsEnum = terms.iterator();
@@ -1213,7 +1213,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
 ft.setStoreTermVectorOffsets(true);
 doc.add(new Field("foo", "bar bar", ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
 TermsEnum termsEnum = terms.iterator();
@@ -1399,7 +1399,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
 ft.setStoreTermVectorPayloads(true);
 doc.add(new Field("foo", new CannedTokenStream(token1, token2), ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
 TermsEnum termsEnum = terms.iterator();
@@ -1585,7 +1585,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
 ft.setStoreTermVectorOffsets(true);
 doc.add(new Field("foo", new CannedTokenStream(token1, token2), ft));
 iw.addDocument(doc);
-DirectoryReader reader = DirectoryReader.open(iw, false);
+DirectoryReader reader = DirectoryReader.open(iw);
 Terms terms = getOnlySegmentReader(reader).getTermVector(0, "foo");
 TermsEnum termsEnum = terms.iterator();