mirror of https://github.com/apache/lucene.git
LUCENE-1705: add deleteAll to IndexWriter
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@788785 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
c7f865a4c7
commit
64a55ede8b
|
@@ -3236,6 +3236,53 @@ public class IndexWriter {
|
|||
closeInternal(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all documents in the index.
|
||||
*
|
||||
* <p>This method will drop all buffered documents and will
|
||||
* remove all segments from the index. This change will not be
|
||||
* visible until a {@link #commit()} has been called. This method
|
||||
* can be rolled back using {@link #rollback()}.</p>
|
||||
*
|
||||
* <p>NOTE: this method is much faster than using deleteDocuments( new MatchAllDocsQuery() ).</p>
|
||||
*
|
||||
* <p>NOTE: this method will forcefully abort all merges
|
||||
* in progress. If other threads are running {@link
|
||||
* #optimize()} or any of the addIndexes methods, they
|
||||
* will receive {@link MergePolicy.MergeAbortedException}s.
|
||||
*/
|
||||
public synchronized void deleteAll() throws IOException {
|
||||
docWriter.pauseAllThreads();
|
||||
try {
|
||||
|
||||
// Abort any running merges
|
||||
finishMerges(false);
|
||||
|
||||
// Remove any buffered docs
|
||||
docWriter.abort();
|
||||
|
||||
// Remove all segments
|
||||
segmentInfos.clear();
|
||||
|
||||
// Ask deleter to locate unreferenced files & remove them:
|
||||
deleter.checkpoint(segmentInfos, false);
|
||||
deleter.refresh();
|
||||
|
||||
// Don't bother saving any changes in our segmentInfos
|
||||
readerPool.clear(null);
|
||||
|
||||
// Mark that the index has changed
|
||||
++changeCount;
|
||||
} catch (OutOfMemoryError oom) {
|
||||
handleOOM(oom, "deleteAll");
|
||||
} finally {
|
||||
docWriter.resumeAllThreads();
|
||||
if (infoStream != null) {
|
||||
message("hit exception during deleteAll");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private synchronized void finishMerges(boolean waitForMerges) throws IOException {
|
||||
if (!waitForMerges) {
|
||||
|
||||
|
|
|
@@ -264,6 +264,138 @@ public class TestIndexWriterDelete extends LuceneTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
// test deleteAll()
|
||||
public void testDeleteAll() throws IOException {
|
||||
for (int pass=0;pass<2;pass++) {
|
||||
boolean autoCommit = (0==pass);
|
||||
Directory dir = new MockRAMDirectory();
|
||||
IndexWriter modifier = new IndexWriter(dir, autoCommit,
|
||||
new WhitespaceAnalyzer(), true);
|
||||
modifier.setMaxBufferedDocs(2);
|
||||
modifier.setMaxBufferedDeleteTerms(2);
|
||||
|
||||
int id = 0;
|
||||
int value = 100;
|
||||
|
||||
for (int i = 0; i < 7; i++) {
|
||||
addDoc(modifier, ++id, value);
|
||||
}
|
||||
modifier.commit();
|
||||
|
||||
IndexReader reader = IndexReader.open(dir);
|
||||
assertEquals(7, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
// Add 1 doc (so we will have something buffered)
|
||||
addDoc(modifier, 99, value);
|
||||
|
||||
// Delete all
|
||||
modifier.deleteAll();
|
||||
|
||||
// Delete all shouldn't be on disk yet
|
||||
reader = IndexReader.open(dir);
|
||||
assertEquals(7, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
// Add 2 new docs (after the deleteAll, before the commit)
|
||||
addDoc(modifier, 101, value);
|
||||
addDoc(modifier, 102, value);
|
||||
|
||||
// commit the delete all
|
||||
modifier.commit();
|
||||
|
||||
// Validate there are no docs left
|
||||
reader = IndexReader.open(dir);
|
||||
assertEquals(2, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
modifier.close();
|
||||
dir.close();
|
||||
}
|
||||
}
|
||||
|
||||
// test rollback of deleteAll()
|
||||
public void testDeleteAllRollback() throws IOException {
|
||||
Directory dir = new MockRAMDirectory();
|
||||
IndexWriter modifier = new IndexWriter(dir, false,
|
||||
new WhitespaceAnalyzer(), true);
|
||||
modifier.setMaxBufferedDocs(2);
|
||||
modifier.setMaxBufferedDeleteTerms(2);
|
||||
|
||||
int id = 0;
|
||||
int value = 100;
|
||||
|
||||
for (int i = 0; i < 7; i++) {
|
||||
addDoc(modifier, ++id, value);
|
||||
}
|
||||
modifier.commit();
|
||||
|
||||
addDoc(modifier, ++id, value);
|
||||
|
||||
IndexReader reader = IndexReader.open(dir);
|
||||
assertEquals(7, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
// Delete all
|
||||
modifier.deleteAll();
|
||||
|
||||
// Roll it back
|
||||
modifier.rollback();
|
||||
modifier.close();
|
||||
|
||||
// Validate that the docs are still there
|
||||
reader = IndexReader.open(dir);
|
||||
assertEquals(7, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
dir.close();
|
||||
}
|
||||
|
||||
|
||||
// test deleteAll() w/ near real-time reader
|
||||
public void testDeleteAllNRT() throws IOException {
|
||||
Directory dir = new MockRAMDirectory();
|
||||
IndexWriter modifier = new IndexWriter(dir, false,
|
||||
new WhitespaceAnalyzer(), true);
|
||||
modifier.setMaxBufferedDocs(2);
|
||||
modifier.setMaxBufferedDeleteTerms(2);
|
||||
|
||||
int id = 0;
|
||||
int value = 100;
|
||||
|
||||
for (int i = 0; i < 7; i++) {
|
||||
addDoc(modifier, ++id, value);
|
||||
}
|
||||
modifier.commit();
|
||||
|
||||
IndexReader reader = modifier.getReader();
|
||||
assertEquals(7, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
addDoc(modifier, ++id, value);
|
||||
addDoc(modifier, ++id, value);
|
||||
|
||||
// Delete all
|
||||
modifier.deleteAll();
|
||||
|
||||
reader = modifier.getReader();
|
||||
assertEquals(0, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
|
||||
// Roll it back
|
||||
modifier.rollback();
|
||||
modifier.close();
|
||||
|
||||
// Validate that the docs are still there
|
||||
reader = IndexReader.open(dir);
|
||||
assertEquals(7, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
dir.close();
|
||||
}
|
||||
|
||||
|
||||
private void addDoc(IndexWriter modifier, int id, int value)
|
||||
throws IOException {
|
||||
Document doc = new Document();
|
||||
|
|
Loading…
Reference in New Issue