LUCENE-2467: fix memory leaks in IW when indexing very large docs

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@945420 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2010-05-17 23:12:34 +00:00
parent 82bc123339
commit 3e53e6561e
5 changed files with 22 additions and 6 deletions

CHANGES.txt

@@ -391,6 +391,9 @@ Bug fixes
   addition, all of IndexCommit methods which threw
   UnsupportedOperationException are now abstract. (Shai Erera)
 
+* LUCENE-2467: Fixed memory leaks in IndexWriter when large documents
+  are indexed. (Mike McCandless)
+
 New features
 
 * LUCENE-2128: Parallelized fetching document frequencies during weight

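The fix targets a common reuse pattern: IndexWriter keeps long-lived, reusable structures (pooled byte/int blocks, a reusable string reader, per-document term-vector buffers), and after a very large document those structures kept strong references to, or stayed sized for, that document's data. A minimal illustrative sketch of the pattern, using hypothetical names rather than Lucene's classes:

// Illustrative sketch only (names are not Lucene's): a reused helper that
// silently retains memory proportional to the largest document ever indexed.
final class ReusableBuffer {
  private char[] buffer;   // grows to fit the largest input, never shrinks

  void process(String doc) {
    if (buffer == null || buffer.length < doc.length()) {
      buffer = new char[doc.length()];   // sized to the current (possibly huge) doc
    }
    doc.getChars(0, doc.length(), buffer, 0);
    // ... consume buffer ...
    // Without nulling or trimming the buffer afterwards, one very large
    // document keeps that memory reachable for the helper's whole lifetime.
    // The changes below address this shape of problem by dropping references
    // (blocks[i] = null, s = null) and rebalancing pooled blocks after a flush.
  }
}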
DocumentsWriter.java

@@ -1314,11 +1314,14 @@ final class DocumentsWriter {
     }
 
     /* Return byte[]'s to the pool */
     @Override
     void recycleByteBlocks(byte[][] blocks, int start, int end) {
       synchronized(DocumentsWriter.this) {
-        for(int i=start;i<end;i++)
+        for(int i=start;i<end;i++) {
           freeByteBlocks.add(blocks[i]);
+          blocks[i] = null;
+        }
       }
     }
 
@@ -1326,9 +1329,11 @@ final class DocumentsWriter {
     void recycleByteBlocks(List<byte[]> blocks) {
       synchronized(DocumentsWriter.this) {
         final int size = blocks.size();
-        for(int i=0;i<size;i++)
+        for(int i=0;i<size;i++) {
           freeByteBlocks.add(blocks.get(i));
+          blocks.set(i, null);
+        }
       }
     }
   }
 
@@ -1358,8 +1363,10 @@ final class DocumentsWriter {
 
   /* Return int[]s to the pool */
   synchronized void recycleIntBlocks(int[][] blocks, int start, int end) {
-    for(int i=start;i<end;i++)
+    for(int i=start;i<end;i++) {
       freeIntBlocks.add(blocks[i]);
+      blocks[i] = null;
+    }
   }
 
   ByteBlockAllocator byteBlockAllocator = new ByteBlockAllocator(BYTE_BLOCK_SIZE);

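The recycle methods above now clear the caller's slots in addition to stashing the blocks on the free list, so the free list becomes the only owner and a later RAM-balancing pass can actually make the memory collectible. A rough, self-contained sketch of that idiom, with hypothetical names rather than DocumentsWriter's real API:

import java.util.ArrayList;
import java.util.List;

// Hypothetical pool, not Lucene's API: recycling clears the caller's
// references so trimming the free list really releases memory to the GC.
final class BlockPool {
  private final List<byte[]> freeBlocks = new ArrayList<byte[]>();

  synchronized void recycle(byte[][] blocks, int start, int end) {
    for (int i = start; i < end; i++) {
      freeBlocks.add(blocks[i]);
      blocks[i] = null;   // otherwise the caller's array still pins the block
    }
  }

  // Roughly what a RAM-balancing pass does: drop excess free blocks.
  synchronized void trimTo(int maxFree) {
    while (freeBlocks.size() > maxFree) {
      freeBlocks.remove(freeBlocks.size() - 1);
    }
  }
}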
IndexWriter.java

@@ -3624,7 +3624,11 @@ public class IndexWriter implements Closeable {
   // even while a flush is happening
   private synchronized final boolean doFlush(boolean flushDocStores, boolean flushDeletes) throws CorruptIndexException, IOException {
     try {
-      return doFlushInternal(flushDocStores, flushDeletes);
+      try {
+        return doFlushInternal(flushDocStores, flushDeletes);
+      } finally {
+        docWriter.balanceRAM();
+      }
     } finally {
       docWriter.clearFlushPending();
     }

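The IndexWriter change wraps the flush in a nested try/finally so docWriter.balanceRAM() runs even when the flush throws, before the outer finally clears the flush-pending state. A tiny standalone demonstration of that ordering (the prints are placeholders for the real calls):

// Standalone demo of the nested try/finally ordering used in doFlush:
// the inner finally runs first, then the outer one, even on failure.
public final class NestedFinallyDemo {
  public static void main(String[] args) {
    try {
      try {
        try {
          System.out.println("flushing...");
          throw new RuntimeException("simulated flush failure");
        } finally {
          System.out.println("balance RAM (inner finally, always runs)");
        }
      } finally {
        System.out.println("clear flush pending (outer finally, always runs)");
      }
    } catch (RuntimeException e) {
      System.out.println("flush failed: " + e.getMessage());
    }
  }
}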
ReusableStringReader.java

@@ -43,6 +43,8 @@ final class ReusableStringReader extends Reader {
       left -= len;
       return len;
     } else if (0 == left) {
+      // don't keep a reference (s could have been very large)
+      s = null;
       return -1;
     } else {
       s.getChars(upto, upto+left, c, off);

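ReusableStringReader is reused across documents, so once the current document's characters are fully consumed it now drops its reference to the String. A simplified sketch of a reader with that behavior (an approximation, not the actual Lucene class):

import java.io.Reader;

// Simplified approximation of a reusable string reader that releases the
// source String as soon as it has been fully read.
final class DroppingStringReader extends Reader {
  private String s;
  private int upto;

  void init(String value) {
    s = value;
    upto = 0;
  }

  @Override
  public int read(char[] c, int off, int len) {
    if (s == null) {
      return -1;                  // already exhausted and released
    }
    int left = s.length() - upto;
    if (left == 0) {
      s = null;                   // don't keep a reference; s may be very large
      return -1;
    }
    int n = Math.min(left, len);
    s.getChars(upto, upto + n, c, off);
    upto += n;
    return n;
  }

  @Override
  public void close() {
    s = null;
  }
}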
TermVectorsTermsWriter.java

@@ -190,7 +190,6 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
         lastPos = pos;
       }
       perDoc.perDocTvf.writeTo(tvf);
-      perDoc.perDocTvf.reset();
       perDoc.numVectorFields = 0;
     }
 
@@ -198,6 +197,7 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
 
     lastDocID++;
     perDoc.reset();
     free(perDoc);
+    assert docWriter.writer.testPoint("TermVectorsTermsWriter.finishDocument end");
   }
 