mirror of https://github.com/apache/lucene.git
LUCENE-1301: fix cause of rare NPE in TestIndexWriterExceptions
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@683206 13f79535-47bb-0310-9956-ffa450edef68
parent 60b43961da
commit 8d8e83518c
TermsHash.java

@@ -44,11 +44,11 @@ final class TermsHash extends InvertedDocConsumer {
   final int postingsFreeChunk;
   final DocumentsWriter docWriter;
 
-  TermsHash primaryTermsHash;
+  private TermsHash primaryTermsHash;
 
-  RawPostingList[] postingsFreeList = new RawPostingList[1];
-  int postingsFreeCount;
-  int postingsAllocCount;
+  private RawPostingList[] postingsFreeList = new RawPostingList[1];
+  private int postingsFreeCount;
+  private int postingsAllocCount;
   boolean trackAllocations;
 
   public TermsHash(final DocumentsWriter docWriter, boolean trackAllocations, final TermsHashConsumer consumer, final TermsHash nextTermsHash) {
@@ -176,17 +176,6 @@ final class TermsHash extends InvertedDocConsumer {
     return any;
   }
 
-  // USE ONLY FOR DEBUGGING!
-  /*
-  public String getPostingText() {
-    char[] text = charPool.buffers[p.textStart >> CHAR_BLOCK_SHIFT];
-    int upto = p.textStart & CHAR_BLOCK_MASK;
-    while(text[upto] != 0xffff)
-      upto++;
-    return new String(text, p.textStart, upto-(p.textStart & BYTE_BLOCK_MASK));
-  }
-  */
-
   synchronized public void recyclePostings(final RawPostingList[] postings, final int numPostings) {
 
     assert postings.length >= numPostings;
@@ -219,19 +208,21 @@ final class TermsHash extends InvertedDocConsumer {
                      postings, 0, numToCopy);
 
     // Directly allocate the remainder if any
-    if (numToCopy < postings.length) {
+    if (numToCopy != postings.length) {
       final int extra = postings.length - numToCopy;
       final int newPostingsAllocCount = postingsAllocCount + extra;
 
-      if (newPostingsAllocCount > postingsFreeList.length)
-        postingsFreeList = new RawPostingList[ArrayUtil.getNextSize(newPostingsAllocCount)];
-
       consumer.createPostings(postings, numToCopy, extra);
       assert docWriter.writer.testPoint("TermsHash.getPostings after create");
       postingsAllocCount += extra;
 
       if (trackAllocations)
         docWriter.bytesAllocated(extra * bytesPerPosting);
+
+      if (newPostingsAllocCount > postingsFreeList.length)
+        // Pre-allocate the postingsFreeList so it's large
+        // enough to hold all postings we've given out
+        postingsFreeList = new RawPostingList[ArrayUtil.getNextSize(newPostingsAllocCount)];
     }
 
     postingsFreeCount -= numToCopy;
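For context, the two methods touched above form a simple object pool: getPostings serves requests from postingsFreeList first, allocates whatever is missing via consumer.createPostings, and (after this patch) grows the free list afterwards so it stays large enough to hold every posting handed out, which is what recyclePostings relies on when copying postings back. Below is a minimal, self-contained sketch of that free-list pattern under assumed names (Posting, PostingPool, and a simple doubling growth policy standing in for ArrayUtil.getNextSize); it is only an illustration, not Lucene's actual implementation.

import java.util.Arrays;

// Hypothetical stand-ins for RawPostingList and TermsHash's free list; illustration only.
class Posting {}

class PostingPool {
  private Posting[] freeList = new Posting[1];
  private int freeCount;   // recycled objects currently available
  private int allocCount;  // objects this pool has ever handed out

  // Fill 'out' from the pool first, then allocate the remainder, growing
  // freeList so a later recycle() of everything handed out will fit
  // (the same bookkeeping the patched getPostings performs).
  public synchronized void get(Posting[] out) {
    final int numToCopy = Math.min(freeCount, out.length);
    System.arraycopy(freeList, freeCount - numToCopy, out, 0, numToCopy);

    if (numToCopy != out.length) {
      final int extra = out.length - numToCopy;
      final int newAllocCount = allocCount + extra;

      for (int i = numToCopy; i < out.length; i++)   // allocate the remainder
        out[i] = new Posting();
      allocCount = newAllocCount;

      if (newAllocCount > freeList.length)
        // Simple power-of-two growth; not Lucene's ArrayUtil policy.
        freeList = Arrays.copyOf(freeList, Integer.highestOneBit(newAllocCount) * 2);
    }

    freeCount -= numToCopy;
  }

  // Return objects to the pool; get() pre-sized the free list to hold them.
  public synchronized void recycle(Posting[] postings, int numPostings) {
    assert freeCount + numPostings <= freeList.length;
    System.arraycopy(postings, 0, freeList, freeCount, numPostings);
    freeCount += numPostings;
  }

  public static void main(String[] args) {
    PostingPool pool = new PostingPool();
    Posting[] batch = new Posting[8];
    pool.get(batch);                  // all newly allocated
    pool.recycle(batch, batch.length);
    pool.get(batch);                  // now served from the free list
    System.out.println("pool sketch ran: " + batch.length + " postings reused");
  }
}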
TermsHashPerThread.java

@@ -74,8 +74,13 @@ final class TermsHashPerThread extends InvertedDocConsumerPerThread
     assert freePostingsCount == 0;
     termsHash.getPostings(freePostings);
     freePostingsCount = freePostings.length;
-    for(int i=0;i<freePostingsCount;i++)
-      assert freePostings[i] != null;
+    assert noNullPostings(freePostings, freePostingsCount, "consumer=" + consumer);
   }
 
+  private static boolean noNullPostings(RawPostingList[] postings, int count, String details) {
+    for(int i=0;i<count;i++)
+      assert postings[i] != null: "postings[" + i + "] of " + count + " is null: " + details;
+    return true;
+  }
+
   public void startDocument() throws IOException {
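The replacement assert in TermsHashPerThread uses a standard Java idiom: the loop of per-element checks lives in a boolean helper that always returns true and is invoked only inside an assert statement, so the scan (and its detailed failure message naming the null slot) runs only when assertions are enabled, e.g. under the -ea flag, and costs nothing otherwise. A tiny stand-alone sketch of the idiom, with hypothetical names:

// Sketch of the assert-helper idiom used by noNullPostings; names are illustrative.
public class AssertHelperDemo {

  // Always returns true; the per-element asserts inside carry the real check
  // along with a message identifying the offending slot.
  private static boolean noNulls(Object[] items, int count, String details) {
    for (int i = 0; i < count; i++)
      assert items[i] != null : "items[" + i + "] of " + count + " is null: " + details;
    return true;
  }

  public static void main(String[] args) {
    Object[] items = { "a", "b", null };
    // The entire call is skipped unless the JVM runs with assertions enabled (-ea).
    assert noNulls(items, items.length, "demo array");
    System.out.println("without -ea this line is reached even though items[2] is null");
  }
}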