LUCENE-2137: switch to AtomicInteger for some ref counts

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@888621 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2009-12-08 23:05:29 +00:00
parent 4e88c6cb93
commit 3df3ab8e9f
5 changed files with 45 additions and 65 deletions
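
For readers skimming the diff: the patch deletes SegmentReader's private synchronized Ref helper and uses java.util.concurrent.atomic.AtomicInteger in its place, with new Ref() becoming new AtomicInteger(1), incRef() becoming incrementAndGet(), decRef() becoming decrementAndGet(), and refCount() becoming get(). A minimal sketch of that mapping (the class and method names below are illustrative, not part of the patch):

    import java.util.concurrent.atomic.AtomicInteger;

    // Illustrative sketch only: maps the removed Ref operations onto AtomicInteger.
    public class RefCountSketch {
      public static void main(String[] args) {
        AtomicInteger ref = new AtomicInteger(1); // was: new Ref(), whose count started at 1

        ref.incrementAndGet();                    // was: ref.incRef()
        System.out.println(ref.get());            // was: ref.refCount(); prints 2

        ref.decrementAndGet();                    // one holder goes away
        if (ref.decrementAndGet() == 0) {         // was: ref.decRef() == 0
          System.out.println("last reference dropped; safe to close shared resources");
        }
      }
    }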

View File

@ -78,6 +78,9 @@ Optimizations
This avoids the overhead of using a PQ unnecessarily. (Mike
McCandless)
+ * LUCENE-2137: Switch to AtomicInteger for some ref counting (Earwin
+   Burrfoot via Mike McCandless)
Build
Test Cases

View File

@ -42,7 +42,7 @@
<property name="Name" value="Lucene"/>
<property name="dev.version" value="3.1-dev"/>
<property name="version" value="${dev.version}"/>
<property name="compatibility.tag" value="lucene_3_0_back_compat_tests_20091207"/>
<property name="compatibility.tag" value="lucene_3_0_back_compat_tests_20091208"/>
<property name="spec.version" value="${version}"/>
<property name="year" value="2000-${current.year}"/>
<property name="final.name" value="lucene-${name}-${version}"/>

View File

@ -28,6 +28,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.search.Similarity;
@ -53,7 +54,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
CloseableThreadLocal<TermVectorsReader> termVectorsLocal = new CloseableThreadLocal<TermVectorsReader>();
BitVector deletedDocs = null;
- Ref deletedDocsRef = null;
+ AtomicInteger deletedDocsRef = null;
private boolean deletedDocsDirty = false;
private boolean normsDirty = false;
private int pendingDeleteCount;
@ -65,7 +66,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
// optionally used for the .nrm file shared by multiple norms
private IndexInput singleNormStream;
- private Ref singleNormRef;
+ private AtomicInteger singleNormRef;
CoreReaders core;
@ -79,7 +80,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
// closed. A given instance of SegmentReader may be
// closed, even though it shares core objects with other
// SegmentReaders:
- private final Ref ref = new Ref();
+ private final AtomicInteger ref = new AtomicInteger(1);
final String segment;
final FieldInfos fieldInfos;
@ -150,7 +151,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
}
synchronized void incRef() {
- ref.incRef();
+ ref.incrementAndGet();
}
synchronized Directory getCFSReader() {
@ -196,7 +197,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
synchronized void decRef() throws IOException {
- if (ref.decRef() == 0) {
+ if (ref.decrementAndGet() == 0) {
// close everything, nothing is shared anymore with other readers
if (tis != null) {
@ -299,31 +300,6 @@ public class SegmentReader extends IndexReader implements Cloneable {
}
}
- static class Ref {
- private int refCount = 1;
- @Override
- public String toString() {
- return "refcount: "+refCount;
- }
- public synchronized int refCount() {
- return refCount;
- }
- public synchronized int incRef() {
- assert refCount > 0;
- refCount++;
- return refCount;
- }
- public synchronized int decRef() {
- assert refCount > 0;
- refCount--;
- return refCount;
- }
- }
/**
* Byte[] referencing is used because a new norm object needs
* to be created for each clone, and the byte array is all
@ -344,7 +320,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
private long normSeek;
// null until bytes is set
- private Ref bytesRef;
+ private AtomicInteger bytesRef;
private byte[] bytes;
private boolean dirty;
private int number;
@ -369,7 +345,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
} else {
// We are sharing this with others -- decRef and
// maybe close the shared norm stream
- if (singleNormRef.decRef() == 0) {
+ if (singleNormRef.decrementAndGet() == 0) {
singleNormStream.close();
singleNormStream = null;
}
@ -392,7 +368,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
if (bytes != null) {
assert bytesRef != null;
- bytesRef.decRef();
+ bytesRef.decrementAndGet();
bytes = null;
bytesRef = null;
} else {
@ -435,7 +411,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
// byte[]
bytes = origNorm.bytes();
bytesRef = origNorm.bytesRef;
- bytesRef.incRef();
+ bytesRef.incrementAndGet();
// Once we've loaded the bytes we no longer need
// origNorm:
@ -457,7 +433,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
in.readBytes(bytes, 0, count, false);
}
- bytesRef = new Ref();
+ bytesRef = new AtomicInteger(1);
closeInput();
}
}
@ -466,7 +442,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
}
// Only for testing
- Ref bytesRef() {
+ AtomicInteger bytesRef() {
return bytesRef;
}
@ -477,15 +453,15 @@ public class SegmentReader extends IndexReader implements Cloneable {
bytes();
assert bytes != null;
assert bytesRef != null;
- if (bytesRef.refCount() > 1) {
+ if (bytesRef.get() > 1) {
// I cannot be the origNorm for another norm
// instance if I'm being changed. Ie, only the
// "head Norm" can be changed:
assert refCount == 1;
- final Ref oldRef = bytesRef;
+ final AtomicInteger oldRef = bytesRef;
bytes = cloneNormBytes(bytes);
- bytesRef = new Ref();
- oldRef.decRef();
+ bytesRef = new AtomicInteger(1);
+ oldRef.decrementAndGet();
}
dirty = true;
return bytes;
@ -511,7 +487,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
assert origNorm == null;
// Clone holds a reference to my bytes:
- clone.bytesRef.incRef();
+ clone.bytesRef.incrementAndGet();
} else {
assert bytesRef == null;
if (origNorm == null) {
@ -602,7 +578,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
// NOTE: the bitvector is stored using the regular directory, not cfs
if (hasDeletions(si)) {
deletedDocs = new BitVector(directory(), si.getDelFileName());
- deletedDocsRef = new Ref();
+ deletedDocsRef = new AtomicInteger(1);
assert si.getDelCount() == deletedDocs.count() :
"delete count mismatch: info=" + si.getDelCount() + " vs BitVector=" + deletedDocs.count();
@ -696,7 +672,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
if (doClone) {
if (deletedDocs != null) {
- deletedDocsRef.incRef();
+ deletedDocsRef.incrementAndGet();
clone.deletedDocs = deletedDocs;
clone.deletedDocsRef = deletedDocsRef;
}
@ -706,7 +682,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
assert clone.deletedDocs == null;
clone.loadDeletedDocs();
} else if (deletedDocs != null) {
- deletedDocsRef.incRef();
+ deletedDocsRef.incrementAndGet();
clone.deletedDocs = deletedDocs;
clone.deletedDocsRef = deletedDocsRef;
}
@ -784,7 +760,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
fieldsReaderLocal.close();
if (deletedDocs != null) {
- deletedDocsRef.decRef();
+ deletedDocsRef.decrementAndGet();
// null so if an app hangs on to us we still free most ram
deletedDocs = null;
}
@ -820,16 +796,16 @@ public class SegmentReader extends IndexReader implements Cloneable {
protected void doDelete(int docNum) {
if (deletedDocs == null) {
deletedDocs = new BitVector(maxDoc());
- deletedDocsRef = new Ref();
+ deletedDocsRef = new AtomicInteger(1);
}
// there is more than 1 SegmentReader with a reference to this
// deletedDocs BitVector so decRef the current deletedDocsRef,
// clone the BitVector, create a new deletedDocsRef
- if (deletedDocsRef.refCount() > 1) {
- Ref oldRef = deletedDocsRef;
+ if (deletedDocsRef.get() > 1) {
+ AtomicInteger oldRef = deletedDocsRef;
deletedDocs = cloneDeletedDocs(deletedDocs);
- deletedDocsRef = new Ref();
- oldRef.decRef();
+ deletedDocsRef = new AtomicInteger(1);
+ oldRef.decrementAndGet();
}
deletedDocsDirty = true;
if (!deletedDocs.getAndSet(docNum))
@ -841,7 +817,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
deletedDocsDirty = false;
if (deletedDocs != null) {
assert deletedDocsRef != null;
- deletedDocsRef.decRef();
+ deletedDocsRef.decrementAndGet();
deletedDocs = null;
deletedDocsRef = null;
pendingDeleteCount = 0;
@ -1056,9 +1032,9 @@ public class SegmentReader extends IndexReader implements Cloneable {
normSeek = nextNormSeek;
if (singleNormStream == null) {
singleNormStream = d.openInput(fileName, readBufferSize);
- singleNormRef = new Ref();
+ singleNormRef = new AtomicInteger(1);
} else {
- singleNormRef.incRef();
+ singleNormRef.incrementAndGet();
}
// All norms in the .nrm file can share a single IndexInput since
// they are only used in a synchronized context.
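
The hunks above keep the existing copy-on-write discipline for deletedDocs and norm bytes and only swap the counter type: a writer that finds more than one holder releases the shared counter, clones the data, and starts a fresh counter at 1; on close, the counter is decremented and the last holder frees the data. A condensed, hypothetical sketch of that idiom (SharedBits and its members stand in for the SegmentReader fields; this is not code from the patch):

    import java.util.Arrays;
    import java.util.concurrent.atomic.AtomicInteger;

    // Hypothetical stand-in for the deletedDocs/norm-bytes fields; condenses the
    // copy-on-write idiom the patch keeps while swapping Ref for AtomicInteger.
    class SharedBits {
      byte[] bits;              // shared data (a BitVector or norm byte[] in the real code)
      AtomicInteger bitsRef;    // how many holders currently share 'bits'

      SharedBits(byte[] bits) {
        this.bits = bits;
        this.bitsRef = new AtomicInteger(1);     // a fresh copy starts with one holder
      }

      // Clone path: share the data and bump the shared counter.
      SharedBits share() {
        SharedBits other = new SharedBits(bits);
        other.bitsRef = bitsRef;                 // point the clone at the shared counter
        bitsRef.incrementAndGet();
        return other;
      }

      // Write path (mirrors doDelete / Norm.copyOnWrite): if anyone else still holds
      // the data, release the shared counter, take a private copy, start a new count at 1.
      void set(int index, byte value) {
        if (bitsRef.get() > 1) {
          AtomicInteger oldRef = bitsRef;
          bits = Arrays.copyOf(bits, bits.length);
          bitsRef = new AtomicInteger(1);
          oldRef.decrementAndGet();
        }
        bits[index] = value;
      }

      // Close path: drop this holder's reference; the last one out frees the data.
      void close() {
        if (bitsRef.decrementAndGet() == 0) {
          bits = null;
        }
        bitsRef = null;
      }

      public static void main(String[] args) {
        SharedBits a = new SharedBits(new byte[8]);
        SharedBits b = a.share();               // shared: counter is now 2
        b.set(3, (byte) 1);                     // b takes a private copy
        System.out.println(a.bitsRef.get());    // 1: only 'a' holds the original
        System.out.println(b.bitsRef.get());    // 1: 'b' owns its fresh copy
      }
    }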

View File

@ -331,7 +331,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
assertDelDocsRefCountEquals(1, origSegmentReader);
// check the norm refs
Norm norm = clonedSegmentReader.norms.get("field1");
- assertEquals(1, norm.bytesRef().refCount());
+ assertEquals(1, norm.bytesRef().get());
clonedSegmentReader.close();
dir1.close();
}
@ -442,7 +442,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
}
private void assertDelDocsRefCountEquals(int refCount, SegmentReader reader) {
- assertEquals(refCount, reader.deletedDocsRef.refCount());
+ assertEquals(refCount, reader.deletedDocsRef.get());
}
public void testCloneSubreaders() throws Exception {

View File

@ -21,6 +21,7 @@ import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
+ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
@ -173,11 +174,11 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
SegmentReader reader1 = SegmentReader.getOnlySegmentReader(dir1);
reader1.norms("field1");
Norm r1norm = reader1.norms.get("field1");
- SegmentReader.Ref r1BytesRef = r1norm.bytesRef();
+ AtomicInteger r1BytesRef = r1norm.bytesRef();
SegmentReader reader2 = (SegmentReader)reader1.clone();
- assertEquals(2, r1norm.bytesRef().refCount());
+ assertEquals(2, r1norm.bytesRef().get());
reader1.close();
- assertEquals(1, r1BytesRef.refCount());
+ assertEquals(1, r1BytesRef.get());
reader2.norms("field1");
reader2.close();
dir1.close();
@ -192,19 +193,19 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
SegmentReader segmentReader2C = SegmentReader.getOnlySegmentReader(reader2C);
segmentReader2C.norms("field1"); // load the norms for the field
Norm reader2CNorm = segmentReader2C.norms.get("field1");
assertTrue("reader2CNorm.bytesRef()=" + reader2CNorm.bytesRef(), reader2CNorm.bytesRef().refCount() == 2);
assertTrue("reader2CNorm.bytesRef()=" + reader2CNorm.bytesRef(), reader2CNorm.bytesRef().get() == 2);
IndexReader reader3C = (IndexReader) reader2C.clone();
SegmentReader segmentReader3C = SegmentReader.getOnlySegmentReader(reader3C);
Norm reader3CCNorm = segmentReader3C.norms.get("field1");
- assertEquals(3, reader3CCNorm.bytesRef().refCount());
+ assertEquals(3, reader3CCNorm.bytesRef().get());
// edit a norm and the refcount should be 1
IndexReader reader4C = (IndexReader) reader3C.clone();
SegmentReader segmentReader4C = SegmentReader.getOnlySegmentReader(reader4C);
- assertEquals(4, reader3CCNorm.bytesRef().refCount());
+ assertEquals(4, reader3CCNorm.bytesRef().get());
reader4C.setNorm(5, "field1", 0.33f);
// generate a cannot update exception in reader1
@ -219,14 +220,14 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
assertTrue(Similarity.getDefault().decodeNormValue(segmentReader3C.norms("field1")[5])
!= Similarity.getDefault().decodeNormValue(segmentReader4C.norms("field1")[5]));
Norm reader4CCNorm = segmentReader4C.norms.get("field1");
- assertEquals(3, reader3CCNorm.bytesRef().refCount());
- assertEquals(1, reader4CCNorm.bytesRef().refCount());
+ assertEquals(3, reader3CCNorm.bytesRef().get());
+ assertEquals(1, reader4CCNorm.bytesRef().get());
IndexReader reader5C = (IndexReader) reader4C.clone();
SegmentReader segmentReader5C = SegmentReader.getOnlySegmentReader(reader5C);
Norm reader5CCNorm = segmentReader5C.norms.get("field1");
reader5C.setNorm(5, "field1", 0.7f);
- assertEquals(1, reader5CCNorm.bytesRef().refCount());
+ assertEquals(1, reader5CCNorm.bytesRef().get());
reader5C.close();
reader4C.close();
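
The removed Ref helper made its counter thread-safe with synchronized methods; AtomicInteger gives the same guarantee through lock-free atomic operations. As general background rather than part of the patch, a standalone check that concurrent incrementAndGet calls are never lost:

    import java.util.concurrent.atomic.AtomicInteger;

    // Standalone demonstration (not from the patch): concurrent increments on an
    // AtomicInteger are never lost, which is the property the ref counts depend on.
    public class AtomicCountDemo {
      public static void main(String[] args) throws InterruptedException {
        final AtomicInteger count = new AtomicInteger(0);
        final int threads = 8, incrementsPerThread = 100_000;

        Thread[] workers = new Thread[threads];
        for (int i = 0; i < threads; i++) {
          workers[i] = new Thread(() -> {
            for (int j = 0; j < incrementsPerThread; j++) {
              count.incrementAndGet();
            }
          });
          workers[i].start();
        }
        for (Thread t : workers) {
          t.join();
        }

        // Always prints 800000; a plain int++ across threads could drop updates.
        System.out.println(count.get());
      }
    }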