mirror of https://github.com/apache/lucene.git
LUCENE-8166: Require merge instances to be consumed in the thread that created them.
parent cf828163bd
commit 577bef53dd
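The commit tightens the getMergeInstance() contract across the codec reader APIs: an instance returned for merging may only be consumed by the thread that acquired it, which lets implementations cache per-thread state without locking. A consumer that merges from several threads therefore pulls one instance per thread. The sketch below models that pattern on the MergingCodecReader class added later in this commit; `in` is assumed to be an open CodecReader, and `perThreadFieldsReader` is an illustrative name, not part of the commit:

    // One merge-optimized StoredFieldsReader per thread. getMergeInstance() no
    // longer declares IOException after this commit, so it can be called from
    // initialValue() without a try/catch.
    CloseableThreadLocal<StoredFieldsReader> perThreadFieldsReader =
        new CloseableThreadLocal<StoredFieldsReader>() {
          @Override
          protected StoredFieldsReader initialValue() {
            return in.getFieldsReader().getMergeInstance();
          }
        };

Each thread that calls perThreadFieldsReader.get() then owns a private reader, which is exactly how MergingCodecReader routes stored-fields and norms reads below.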
@@ -91,7 +91,7 @@ final class Lucene70NormsProducer extends NormsProducer implements Cloneable {
   }
 
   @Override
-  public NormsProducer getMergeInstance() throws IOException {
+  public NormsProducer getMergeInstance() {
     Lucene70NormsProducer clone;
     try {
       clone = (Lucene70NormsProducer) super.clone();
@@ -80,7 +80,7 @@ class DirectDocValuesProducer extends DocValuesProducer {
   static final int VERSION_CURRENT = VERSION_START;
 
   // clone for merge: when merging we don't do any instances.put()s
-  DirectDocValuesProducer(DirectDocValuesProducer original) throws IOException {
+  DirectDocValuesProducer(DirectDocValuesProducer original) {
     assert Thread.holdsLock(original);
     numerics.putAll(original.numerics);
     binaries.putAll(original.binaries);
@@ -606,7 +606,7 @@ class DirectDocValuesProducer extends DocValuesProducer {
   }
 
   @Override
-  public synchronized DocValuesProducer getMergeInstance() throws IOException {
+  public synchronized DocValuesProducer getMergeInstance() {
     return new DirectDocValuesProducer(this);
   }
 
@@ -74,10 +74,11 @@ public abstract class DocValuesProducer implements Closeable, Accountable {
   public abstract void checkIntegrity() throws IOException;
 
   /**
-   * Returns an instance optimized for merging.
+   * Returns an instance optimized for merging. This instance may only be
+   * consumed in the thread that called {@link #getMergeInstance()}.
    * <p>
    * The default implementation returns {@code this} */
-  public DocValuesProducer getMergeInstance() throws IOException {
+  public DocValuesProducer getMergeInstance() {
     return this;
   }
 }
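The sibling reader APIs below receive the same javadoc. To make the new contract concrete, here is a hedged sketch of the access pattern it forbids; `producer` and `field` are hypothetical and assumed to come from an open segment, and under the asserting test codec (later in this commit) the cross-thread read fails with an AssertionError:

    DocValuesProducer mergeInstance = producer.getMergeInstance();
    mergeInstance.getNumeric(field);     // fine: consumed by the acquiring thread
    new Thread(() -> {
      try {
        mergeInstance.getNumeric(field); // forbidden: consumed by another thread
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }
    }).start();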
@@ -48,10 +48,11 @@ public abstract class FieldsProducer extends Fields implements Closeable, Accountable {
   public abstract void checkIntegrity() throws IOException;
 
   /**
-   * Returns an instance optimized for merging.
+   * Returns an instance optimized for merging. This instance may only be
+   * consumed in the thread that called {@link #getMergeInstance()}.
    * <p>
    * The default implementation returns {@code this} */
-  public FieldsProducer getMergeInstance() throws IOException {
+  public FieldsProducer getMergeInstance() {
     return this;
   }
 }
@@ -49,10 +49,11 @@ public abstract class NormsProducer implements Closeable, Accountable {
   public abstract void checkIntegrity() throws IOException;
 
   /**
-   * Returns an instance optimized for merging.
+   * Returns an instance optimized for merging. This instance may only be used
+   * from the thread that acquires it.
    * <p>
    * The default implementation returns {@code this} */
-  public NormsProducer getMergeInstance() throws IOException {
+  public NormsProducer getMergeInstance() {
     return this;
   }
 }
@@ -45,10 +45,11 @@ public abstract class PointsReader implements Closeable, Accountable {
   public abstract PointValues getValues(String field) throws IOException;
 
   /**
-   * Returns an instance optimized for merging.
+   * Returns an instance optimized for merging. This instance may only be used
+   * in the thread that acquires it.
    * <p>
    * The default implementation returns {@code this} */
-  public PointsReader getMergeInstance() throws IOException {
+  public PointsReader getMergeInstance() {
     return this;
   }
 }
@@ -52,10 +52,10 @@ public abstract class StoredFieldsReader implements Cloneable, Closeable, Accountable {
   public abstract void checkIntegrity() throws IOException;
 
   /**
-   * Returns an instance optimized for merging.
+   * Returns an instance optimized for merging. This instance may not be cloned.
    * <p>
    * The default implementation returns {@code this} */
-  public StoredFieldsReader getMergeInstance() throws IOException {
+  public StoredFieldsReader getMergeInstance() {
     return this;
   }
 }
@@ -57,10 +57,11 @@ public abstract class TermVectorsReader implements Cloneable, Closeable, Accountable {
   public abstract TermVectorsReader clone();
 
   /**
-   * Returns an instance optimized for merging.
+   * Returns an instance optimized for merging. This instance may only be
+   * consumed in the thread that called {@link #getMergeInstance()}.
    * <p>
    * The default implementation returns {@code this} */
-  public TermVectorsReader getMergeInstance() throws IOException {
+  public TermVectorsReader getMergeInstance() {
     return this;
   }
 }
@@ -92,7 +92,7 @@ final class Lucene80NormsProducer extends NormsProducer implements Cloneable {
   }
 
   @Override
-  public NormsProducer getMergeInstance() throws IOException {
+  public NormsProducer getMergeInstance() {
     Lucene80NormsProducer clone;
     try {
       clone = (Lucene80NormsProducer) super.clone();
@@ -233,18 +233,79 @@ final class Lucene80NormsProducer extends NormsProducer implements Cloneable {
   }
 
   private IndexInput getDisiInput(FieldInfo field, NormsEntry entry) throws IOException {
-    IndexInput slice = null;
-    if (merging) {
-      slice = disiInputs.get(field.number);
-    }
-    if (slice == null) {
-      slice = IndexedDISI.createBlockSlice(
+    if (merging == false) {
+      return IndexedDISI.createBlockSlice(
           data, "docs", entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.jumpTableEntryCount);
-      if (merging) {
-        disiInputs.put(field.number, slice);
-      }
     }
-    return slice;
+
+    IndexInput in = disiInputs.get(field.number);
+    if (in == null) {
+      in = IndexedDISI.createBlockSlice(
+          data, "docs", entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.jumpTableEntryCount);
+      disiInputs.put(field.number, in);
+    }
+
+    final IndexInput inF = in; // same as in but final
+
+    // Wrap so that reads can be interleaved from the same thread if two
+    // norms instances are pulled and consumed in parallel. Merging usually
+    // doesn't need this feature but CheckIndex might, plus we need merge
+    // instances to behave well and not be trappy.
+    return new IndexInput("docs") {
+
+      long offset = 0;
+
+      @Override
+      public void readBytes(byte[] b, int off, int len) throws IOException {
+        inF.seek(offset);
+        offset += len;
+        inF.readBytes(b, off, len);
+      }
+
+      @Override
+      public byte readByte() throws IOException {
+        throw new UnsupportedOperationException("Unused by IndexedDISI");
+      }
+
+      @Override
+      public IndexInput slice(String sliceDescription, long offset, long length) throws IOException {
+        throw new UnsupportedOperationException("Unused by IndexedDISI");
+      }
+
+      @Override
+      public short readShort() throws IOException {
+        inF.seek(offset);
+        offset += Short.BYTES;
+        return inF.readShort();
+      }
+
+      @Override
+      public long readLong() throws IOException {
+        inF.seek(offset);
+        offset += Long.BYTES;
+        return inF.readLong();
+      }
+
+      @Override
+      public void seek(long pos) throws IOException {
+        offset = pos;
+      }
+
+      @Override
+      public long length() {
+        throw new UnsupportedOperationException("Unused by IndexedDISI");
+      }
+
+      @Override
+      public long getFilePointer() {
+        return offset;
+      }
+
+      @Override
+      public void close() throws IOException {
+        throw new UnsupportedOperationException("Unused by IndexedDISI");
+      }
+    };
   }
 
   private RandomAccessInput getDisiJumpTable(FieldInfo field, NormsEntry entry) throws IOException {
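A note on the rewritten getDisiInput: when merging, the per-field doc-id slice is cached in disiInputs and may back more than one norms iterator pulled on the owning thread, so the returned IndexInput keeps a private logical offset and re-seeks the shared slice before every read. Without the wrapper, two iterators would fight over a single file pointer; a hypothetical misuse of a shared input `shared` shows the trap:

    byte[] buf = new byte[4];
    shared.seek(0);
    shared.readBytes(buf, 0, 4);  // file pointer is now at 4
    shared.seek(100);             // a second consumer repositions the same input
    shared.readBytes(buf, 0, 4);  // the first consumer now silently reads from 100

The wrapper trades one extra seek per read for correctness; as the in-code comment says, merging rarely interleaves iterators, but CheckIndex can.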
@@ -327,7 +388,7 @@ final class Lucene80NormsProducer extends NormsProducer implements Cloneable {
         }
       };
     }
-    final RandomAccessInput slice = data.randomAccessSlice(entry.normsOffset, entry.numDocsWithField * (long) entry.bytesPerNorm);
+    final RandomAccessInput slice = getDataInput(field, entry);
     switch (entry.bytesPerNorm) {
       case 1:
         return new SparseNormsIterator(disi) {
@@ -261,7 +261,7 @@ public abstract class PerFieldDocValuesFormat extends DocValuesFormat {
     private final Map<String,DocValuesProducer> formats = new HashMap<>();
 
     // clone for merge
-    FieldsReader(FieldsReader other) throws IOException {
+    FieldsReader(FieldsReader other) {
      Map<DocValuesProducer,DocValuesProducer> oldToNew = new IdentityHashMap<>();
      // First clone all formats
      for(Map.Entry<String,DocValuesProducer> ent : other.formats.entrySet()) {
@@ -368,7 +368,7 @@ public abstract class PerFieldDocValuesFormat extends DocValuesFormat {
    }
 
    @Override
-    public DocValuesProducer getMergeInstance() throws IOException {
+    public DocValuesProducer getMergeInstance() {
      return new FieldsReader(this);
    }
 
@@ -247,7 +247,7 @@ public abstract class PerFieldPostingsFormat extends PostingsFormat {
    private final String segment;
 
    // clone for merge
-    FieldsReader(FieldsReader other) throws IOException {
+    FieldsReader(FieldsReader other) {
      Map<FieldsProducer,FieldsProducer> oldToNew = new IdentityHashMap<>();
      // First clone all formats
      for(Map.Entry<String,FieldsProducer> ent : other.formats.entrySet()) {
@@ -346,7 +346,7 @@ public abstract class PerFieldPostingsFormat extends PostingsFormat {
    }
 
    @Override
-    public FieldsProducer getMergeInstance() throws IOException {
+    public FieldsProducer getMergeInstance() {
      return new FieldsReader(this);
    }
 
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene50;
+
+/**
+ * Test the merge instance of the Lucene50 stored fields format.
+ */
+public class TestLucene50StoredFieldsFormatMergeInstance extends TestLucene50StoredFieldsFormat {
+
+  @Override
+  protected boolean shouldTestMergeInstance() {
+    return true;
+  }
+
+}
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene80;
+
+/**
+ * Test the merge instance of the Lucene80 norms format.
+ */
+public class TestLucene80NormsFormatMergeInstance extends TestLucene80NormsFormat {
+
+  @Override
+  protected boolean shouldTestMergeInstance() {
+    return true;
+  }
+
+}
@@ -222,7 +222,7 @@ public class TestMultiTermsEnum extends LuceneTestCase {
     }
 
     @Override
-    public FieldsProducer getMergeInstance() throws IOException {
+    public FieldsProducer getMergeInstance() {
       return create(delegate.getMergeInstance(), newFieldInfo);
     }
 
@@ -133,7 +133,7 @@ final class CompletionFieldsProducer extends FieldsProducer {
   }
 
   @Override
-  public FieldsProducer getMergeInstance() throws IOException {
+  public FieldsProducer getMergeInstance() {
     return new CompletionFieldsProducer(delegateFieldsProducer, readers);
   }
 
@@ -33,6 +33,14 @@ import org.apache.lucene.util.TestUtil;
  */
 public class AssertingCodec extends FilterCodec {
 
+  static void assertThread(String object, Thread creationThread) {
+    if (creationThread != Thread.currentThread()) {
+      throw new AssertionError(object + " are only supposed to be consumed in "
+          + "the thread in which they have been acquired. But was acquired in "
+          + creationThread + " and consumed in " + Thread.currentThread() + ".");
+    }
+  }
+
   private final PostingsFormat postings = new PerFieldPostingsFormat() {
     @Override
     public PostingsFormat getPostingsFormatForField(String field) {
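assertThread is the single enforcement point; the asserting readers below each record their creation thread, carry a merging flag, and call it on every read. Distilled, the guard they all share looks like the following sketch (`GuardedReader` is an illustrative name, not a class in this commit):

    class GuardedReader {
      private final boolean merging;
      private final Thread creationThread = Thread.currentThread();
      GuardedReader(boolean merging) { this.merging = merging; }
      void checkThread() {
        if (merging) { // only merge instances carry the single-thread contract
          AssertingCodec.assertThread("GuardedReader", creationThread);
        }
      }
    }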
@@ -62,7 +62,7 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
     assert state.fieldInfos.hasDocValues();
     DocValuesProducer producer = in.fieldsProducer(state);
     assert producer != null;
-    return new AssertingDocValuesProducer(producer, state.segmentInfo.maxDoc());
+    return new AssertingDocValuesProducer(producer, state.segmentInfo.maxDoc(), false);
   }
 
   static class AssertingDocValuesConsumer extends DocValuesConsumer {
@@ -219,10 +219,14 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
   static class AssertingDocValuesProducer extends DocValuesProducer {
     private final DocValuesProducer in;
     private final int maxDoc;
+    private final boolean merging;
+    private final Thread creationThread;
 
-    AssertingDocValuesProducer(DocValuesProducer in, int maxDoc) {
+    AssertingDocValuesProducer(DocValuesProducer in, int maxDoc, boolean merging) {
       this.in = in;
       this.maxDoc = maxDoc;
+      this.merging = merging;
+      this.creationThread = Thread.currentThread();
       // do a few simple checks on init
       assert toString() != null;
       assert ramBytesUsed() >= 0;
@@ -231,6 +235,9 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
 
     @Override
     public NumericDocValues getNumeric(FieldInfo field) throws IOException {
+      if (merging) {
+        AssertingCodec.assertThread("DocValuesProducer", creationThread);
+      }
       assert field.getDocValuesType() == DocValuesType.NUMERIC;
       NumericDocValues values = in.getNumeric(field);
       assert values != null;
@@ -239,6 +246,9 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
 
     @Override
     public BinaryDocValues getBinary(FieldInfo field) throws IOException {
+      if (merging) {
+        AssertingCodec.assertThread("DocValuesProducer", creationThread);
+      }
       assert field.getDocValuesType() == DocValuesType.BINARY;
       BinaryDocValues values = in.getBinary(field);
       assert values != null;
@@ -247,6 +257,9 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
 
     @Override
     public SortedDocValues getSorted(FieldInfo field) throws IOException {
+      if (merging) {
+        AssertingCodec.assertThread("DocValuesProducer", creationThread);
+      }
       assert field.getDocValuesType() == DocValuesType.SORTED;
       SortedDocValues values = in.getSorted(field);
       assert values != null;
@@ -255,6 +268,9 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
 
     @Override
     public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
+      if (merging) {
+        AssertingCodec.assertThread("DocValuesProducer", creationThread);
+      }
       assert field.getDocValuesType() == DocValuesType.SORTED_NUMERIC;
       SortedNumericDocValues values = in.getSortedNumeric(field);
       assert values != null;
@@ -263,6 +279,9 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
 
     @Override
     public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
+      if (merging) {
+        AssertingCodec.assertThread("DocValuesProducer", creationThread);
+      }
       assert field.getDocValuesType() == DocValuesType.SORTED_SET;
       SortedSetDocValues values = in.getSortedSet(field);
       assert values != null;
@@ -295,8 +314,8 @@ public class AssertingDocValuesFormat extends DocValuesFormat {
     }
 
     @Override
-    public DocValuesProducer getMergeInstance() throws IOException {
-      return new AssertingDocValuesProducer(in.getMergeInstance(), maxDoc);
+    public DocValuesProducer getMergeInstance() {
+      return new AssertingDocValuesProducer(in.getMergeInstance(), maxDoc, true);
     }
 
     @Override
@@ -50,7 +50,7 @@ public class AssertingNormsFormat extends NormsFormat {
     assert state.fieldInfos.hasNorms();
     NormsProducer producer = in.normsProducer(state);
     assert producer != null;
-    return new AssertingNormsProducer(producer, state.segmentInfo.maxDoc());
+    return new AssertingNormsProducer(producer, state.segmentInfo.maxDoc(), false);
   }
 
   static class AssertingNormsConsumer extends NormsConsumer {
@@ -88,10 +88,14 @@ public class AssertingNormsFormat extends NormsFormat {
   static class AssertingNormsProducer extends NormsProducer {
     private final NormsProducer in;
     private final int maxDoc;
+    private final boolean merging;
+    private final Thread creationThread;
 
-    AssertingNormsProducer(NormsProducer in, int maxDoc) {
+    AssertingNormsProducer(NormsProducer in, int maxDoc, boolean merging) {
      this.in = in;
      this.maxDoc = maxDoc;
+      this.merging = merging;
+      this.creationThread = Thread.currentThread();
      // do a few simple checks on init
      assert toString() != null;
      assert ramBytesUsed() >= 0;
@@ -100,6 +104,9 @@ public class AssertingNormsFormat extends NormsFormat {
 
     @Override
     public NumericDocValues getNorms(FieldInfo field) throws IOException {
+      if (merging) {
+        AssertingCodec.assertThread("NormsProducer", creationThread);
+      }
       assert field.hasNorms();
       NumericDocValues values = in.getNorms(field);
       assert values != null;
@@ -132,8 +139,8 @@ public class AssertingNormsFormat extends NormsFormat {
     }
 
     @Override
-    public NormsProducer getMergeInstance() throws IOException {
-      return new AssertingNormsProducer(in.getMergeInstance(), maxDoc);
+    public NormsProducer getMergeInstance() {
+      return new AssertingNormsProducer(in.getMergeInstance(), maxDoc, true);
     }
 
     @Override
@@ -60,17 +60,21 @@ public final class AssertingPointsFormat extends PointsFormat {
 
   @Override
   public PointsReader fieldsReader(SegmentReadState state) throws IOException {
-    return new AssertingPointsReader(state.segmentInfo.maxDoc(), in.fieldsReader(state));
+    return new AssertingPointsReader(state.segmentInfo.maxDoc(), in.fieldsReader(state), false);
   }
 
 
   static class AssertingPointsReader extends PointsReader {
     private final PointsReader in;
     private final int maxDoc;
+    private final boolean merging;
+    private final Thread creationThread;
 
-    AssertingPointsReader(int maxDoc, PointsReader in) {
+    AssertingPointsReader(int maxDoc, PointsReader in, boolean merging) {
      this.in = in;
      this.maxDoc = maxDoc;
+      this.merging = merging;
+      this.creationThread = Thread.currentThread();
      // do a few simple checks on init
      assert toString() != null;
      assert ramBytesUsed() >= 0;
@@ -85,6 +89,9 @@ public final class AssertingPointsFormat extends PointsFormat {
 
     @Override
     public PointValues getValues(String field) throws IOException {
+      if (merging) {
+        AssertingCodec.assertThread("PointsReader", creationThread);
+      }
       PointValues values = this.in.getValues(field);
       if (values == null) {
         return null;
@@ -112,8 +119,8 @@ public final class AssertingPointsFormat extends PointsFormat {
     }
 
     @Override
-    public PointsReader getMergeInstance() throws IOException {
-      return new AssertingPointsReader(maxDoc, in.getMergeInstance());
+    public PointsReader getMergeInstance() {
+      return new AssertingPointsReader(maxDoc, in.getMergeInstance(), true);
     }
 
     @Override
@@ -114,7 +114,7 @@ public final class AssertingPostingsFormat extends PostingsFormat {
     }
 
     @Override
-    public FieldsProducer getMergeInstance() throws IOException {
+    public FieldsProducer getMergeInstance() {
       return new AssertingFieldsProducer(in.getMergeInstance());
     }
 
@@ -40,7 +40,7 @@ public class AssertingStoredFieldsFormat extends StoredFieldsFormat {
 
   @Override
   public StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException {
-    return new AssertingStoredFieldsReader(in.fieldsReader(directory, si, fn, context), si.maxDoc());
+    return new AssertingStoredFieldsReader(in.fieldsReader(directory, si, fn, context), si.maxDoc(), false);
   }
 
   @Override
@@ -51,10 +51,14 @@ public class AssertingStoredFieldsFormat extends StoredFieldsFormat {
   static class AssertingStoredFieldsReader extends StoredFieldsReader {
     private final StoredFieldsReader in;
     private final int maxDoc;
+    private final boolean merging;
+    private final Thread creationThread;
 
-    AssertingStoredFieldsReader(StoredFieldsReader in, int maxDoc) {
+    AssertingStoredFieldsReader(StoredFieldsReader in, int maxDoc, boolean merging) {
      this.in = in;
      this.maxDoc = maxDoc;
+      this.merging = merging;
+      this.creationThread = Thread.currentThread();
      // do a few simple checks on init
      assert toString() != null;
      assert ramBytesUsed() >= 0;
@@ -69,13 +73,15 @@ public class AssertingStoredFieldsFormat extends StoredFieldsFormat {
 
     @Override
     public void visitDocument(int n, StoredFieldVisitor visitor) throws IOException {
+      AssertingCodec.assertThread("StoredFieldsReader", creationThread);
       assert n >= 0 && n < maxDoc;
       in.visitDocument(n, visitor);
     }
 
     @Override
     public StoredFieldsReader clone() {
-      return new AssertingStoredFieldsReader(in.clone(), maxDoc);
+      assert merging == false : "Merge instances do not support cloning";
+      return new AssertingStoredFieldsReader(in.clone(), maxDoc, false);
     }
 
     @Override
@@ -98,8 +104,8 @@ public class AssertingStoredFieldsFormat extends StoredFieldsFormat {
     }
 
     @Override
-    public StoredFieldsReader getMergeInstance() throws IOException {
-      return new AssertingStoredFieldsReader(in.getMergeInstance(), maxDoc);
+    public StoredFieldsReader getMergeInstance() {
+      return new AssertingStoredFieldsReader(in.getMergeInstance(), maxDoc, true);
     }
 
     @Override
@@ -97,7 +97,7 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {
     }
 
     @Override
-    public TermVectorsReader getMergeInstance() throws IOException {
+    public TermVectorsReader getMergeInstance() {
       return new AssertingTermVectorsReader(in.getMergeInstance());
     }
 
@@ -1028,7 +1028,7 @@ public class AssertingLeafReader extends FilterLeafReader {
 
   /** Wraps a SortedSetDocValues but with additional asserts */
   public static class AssertingPointValues extends PointValues {
-
+    private final Thread creationThread = Thread.currentThread();
     private final PointValues in;
 
     /** Sole constructor. */
@@ -1048,11 +1048,13 @@ public class AssertingLeafReader extends FilterLeafReader {
 
     @Override
     public void intersect(IntersectVisitor visitor) throws IOException {
+      assertThread("Points", creationThread);
       in.intersect(new AssertingIntersectVisitor(in.getNumDataDimensions(), in.getNumIndexDimensions(), in.getBytesPerDimension(), visitor));
     }
 
     @Override
     public long estimatePointCount(IntersectVisitor visitor) {
+      assertThread("Points", creationThread);
       long cost = in.estimatePointCount(visitor);
       assert cost >= 0;
       return cost;
@@ -1060,36 +1062,43 @@ public class AssertingLeafReader extends FilterLeafReader {
 
     @Override
     public byte[] getMinPackedValue() throws IOException {
+      assertThread("Points", creationThread);
       return Objects.requireNonNull(in.getMinPackedValue());
     }
 
     @Override
     public byte[] getMaxPackedValue() throws IOException {
+      assertThread("Points", creationThread);
       return Objects.requireNonNull(in.getMaxPackedValue());
     }
 
     @Override
     public int getNumDataDimensions() throws IOException {
+      assertThread("Points", creationThread);
       return in.getNumDataDimensions();
     }
 
     @Override
     public int getNumIndexDimensions() throws IOException {
+      assertThread("Points", creationThread);
       return in.getNumIndexDimensions();
     }
 
     @Override
     public int getBytesPerDimension() throws IOException {
+      assertThread("Points", creationThread);
       return in.getBytesPerDimension();
     }
 
     @Override
     public long size() {
+      assertThread("Points", creationThread);
       return in.size();
     }
 
     @Override
     public int getDocCount() {
+      assertThread("Points", creationThread);
       return in.getDocCount();
     }
 
@@ -19,6 +19,7 @@ package org.apache.lucene.index;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -131,9 +132,15 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
       queue.addAll(map.values());
       v = 2L * map.size() * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
     } else {
-      v = super.accumulateObject(o, shallowSize, fieldValues, queue);
+      List<Object> references = new ArrayList<>();
+      v = super.accumulateObject(o, shallowSize, fieldValues, references);
+      for (Object r : references) {
+        // AssertingCodec adds Thread references to make sure objects are consumed in the right thread
+        if (r instanceof Thread == false) {
+          queue.add(r);
+        }
+      }
     }
-    // System.out.println(o.getClass() + "=" + v);
     return v;
   }
 
@@ -698,4 +705,19 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
 
       Rethrow.rethrow(e);
     }
   }
+
+  /**
+   * Returns {@code false} if only the regular fields reader should be tested,
+   * and {@code true} if only the merge instance should be tested.
+   */
+  protected boolean shouldTestMergeInstance() {
+    return false;
+  }
+
+  protected final DirectoryReader maybeWrapWithMergingReader(DirectoryReader r) throws IOException {
+    if (shouldTestMergeInstance()) {
+      r = new MergingDirectoryReaderWrapper(r);
+    }
+    return r;
+  }
 }
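A format test opts in by overriding the new hook, after which every reader the base test opens passes through maybeWrapWithMergingReader; the two new test classes earlier in this commit do exactly this. A sketch with hypothetical class names:

    public class TestMyFormatMergeInstance extends TestMyFormat {
      @Override
      protected boolean shouldTestMergeInstance() {
        return true; // the whole base-test suite now exercises the merge instances
      }
    }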
@@ -37,6 +37,7 @@ import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.TestUtil;
 
 import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
|
@ -491,14 +492,14 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
|
||||||
writer.commit();
|
writer.commit();
|
||||||
|
|
||||||
// compare
|
// compare
|
||||||
DirectoryReader ir = DirectoryReader.open(dir);
|
DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(dir));
|
||||||
checkNormsVsDocValues(ir);
|
checkNormsVsDocValues(ir);
|
||||||
ir.close();
|
ir.close();
|
||||||
|
|
||||||
writer.forceMerge(1);
|
writer.forceMerge(1);
|
||||||
|
|
||||||
// compare again
|
// compare again
|
||||||
ir = DirectoryReader.open(dir);
|
ir = maybeWrapWithMergingReader(DirectoryReader.open(dir));
|
||||||
checkNormsVsDocValues(ir);
|
checkNormsVsDocValues(ir);
|
||||||
|
|
||||||
writer.close();
|
writer.close();
|
||||||
|
@@ -605,7 +606,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCase {
       w.deleteDocuments(new Term("id", ""+id));
     }
     w.forceMerge(1);
-    IndexReader r = w.getReader();
+    IndexReader r = maybeWrapWithMergingReader(w.getReader());
     assertFalse(r.hasDeletions());
 
     // Confusingly, norms should exist, and should all be 0, even though we deleted all docs that had the field "content". They should not
@@ -679,7 +680,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCase {
       }
     }
 
-    DirectoryReader reader = writer.getReader();
+    DirectoryReader reader = maybeWrapWithMergingReader(writer.getReader());
     writer.close();
 
     final int numThreads = TestUtil.nextInt(random(), 3, 30);
@@ -711,4 +712,72 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCase {
     reader.close();
     dir.close();
   }
+
+  public void testIndependantIterators() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig().setMergePolicy(newLogMergePolicy());
+    CannedNormSimilarity sim = new CannedNormSimilarity(new long[] {42, 10, 20});
+    conf.setSimilarity(sim);
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
+    Document doc = new Document();
+    Field indexedField = new TextField("indexed", "a", Field.Store.NO);
+    doc.add(indexedField);
+    for (int i = 0; i < 3; ++i) {
+      writer.addDocument(doc);
+    }
+    writer.forceMerge(1);
+    LeafReader r = getOnlyLeafReader(maybeWrapWithMergingReader(writer.getReader()));
+    NumericDocValues n1 = r.getNormValues("indexed");
+    NumericDocValues n2 = r.getNormValues("indexed");
+    assertEquals(0, n1.nextDoc());
+    assertEquals(42, n1.longValue());
+    assertEquals(1, n1.nextDoc());
+    assertEquals(10, n1.longValue());
+    assertEquals(0, n2.nextDoc());
+    assertEquals(42, n2.longValue());
+    assertEquals(1, n2.nextDoc());
+    assertEquals(10, n2.longValue());
+    assertEquals(2, n2.nextDoc());
+    assertEquals(20, n2.longValue());
+    assertEquals(2, n1.nextDoc());
+    assertEquals(20, n1.longValue());
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, n1.nextDoc());
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, n2.nextDoc());
+    IOUtils.close(r, writer, dir);
+  }
+
+  public void testIndependantSparseIterators() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig().setMergePolicy(newLogMergePolicy());
+    CannedNormSimilarity sim = new CannedNormSimilarity(new long[] {42, 10, 20});
+    conf.setSimilarity(sim);
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
+    Document doc = new Document();
+    Field indexedField = new TextField("indexed", "a", Field.Store.NO);
+    doc.add(indexedField);
+    Document emptyDoc = new Document();
+    for (int i = 0; i < 3; ++i) {
+      writer.addDocument(doc);
+      writer.addDocument(emptyDoc);
+    }
+    writer.forceMerge(1);
+    LeafReader r = getOnlyLeafReader(maybeWrapWithMergingReader(writer.getReader()));
+    NumericDocValues n1 = r.getNormValues("indexed");
+    NumericDocValues n2 = r.getNormValues("indexed");
+    assertEquals(0, n1.nextDoc());
+    assertEquals(42, n1.longValue());
+    assertEquals(2, n1.nextDoc());
+    assertEquals(10, n1.longValue());
+    assertEquals(0, n2.nextDoc());
+    assertEquals(42, n2.longValue());
+    assertEquals(2, n2.nextDoc());
+    assertEquals(10, n2.longValue());
+    assertEquals(4, n2.nextDoc());
+    assertEquals(20, n2.longValue());
+    assertEquals(4, n1.nextDoc());
+    assertEquals(20, n1.longValue());
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, n1.nextDoc());
+    assertEquals(DocIdSetIterator.NO_MORE_DOCS, n2.nextDoc());
+    IOUtils.close(r, writer, dir);
+  }
 }
@@ -140,7 +140,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
     String[] idsList = docs.keySet().toArray(new String[docs.size()]);
 
     for(int x=0;x<2;x++) {
-      IndexReader r = w.getReader();
+      DirectoryReader r = maybeWrapWithMergingReader(w.getReader());
       IndexSearcher s = newSearcher(r);
 
       if (VERBOSE) {
@@ -181,7 +181,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
     doc.add(newField("aaa", "a b c", customType));
     doc.add(newField("zzz", "1 2 3", customType));
     w.addDocument(doc);
-    IndexReader r = w.getReader();
+    IndexReader r = maybeWrapWithMergingReader(w.getReader());
     Document doc2 = r.document(0);
     Iterator<IndexableField> it = doc2.getFields().iterator();
     assertTrue(it.hasNext());
@@ -280,7 +280,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
       doc.add(new NumericDocValuesField("id", id));
       w.addDocument(doc);
     }
-    final DirectoryReader r = w.getReader();
+    final DirectoryReader r = maybeWrapWithMergingReader(w.getReader());
    w.close();
 
     assertEquals(numDocs, r.numDocs());
@@ -309,7 +309,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
     doc.add(new Field("field", "value", onlyStored));
     doc.add(new StringField("field2", "value", Field.Store.YES));
     w.addDocument(doc);
-    IndexReader r = w.getReader();
+    IndexReader r = maybeWrapWithMergingReader(w.getReader());
     w.close();
     assertEquals(IndexOptions.NONE, r.document(0).getField("field").fieldType().indexOptions());
     assertNotNull(r.document(0).getField("field2").fieldType().indexOptions());
@@ -352,7 +352,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
     }
     iw.commit();
 
-    final DirectoryReader reader = DirectoryReader.open(dir);
+    final DirectoryReader reader = maybeWrapWithMergingReader(DirectoryReader.open(dir));
     final int docID = random().nextInt(100);
     for (Field fld : fields) {
       String fldName = fld.name();
@@ -383,7 +383,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
       iw.addDocument(emptyDoc);
     }
     iw.commit();
-    final DirectoryReader rd = DirectoryReader.open(dir);
+    final DirectoryReader rd = maybeWrapWithMergingReader(DirectoryReader.open(dir));
     for (int i = 0; i < numDocs; ++i) {
       final Document doc = rd.document(i);
       assertNotNull(doc);
@@ -412,7 +412,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
     }
     iw.commit();
 
-    final DirectoryReader rd = DirectoryReader.open(dir);
+    final DirectoryReader rd = maybeWrapWithMergingReader(DirectoryReader.open(dir));
     final IndexSearcher searcher = new IndexSearcher(rd);
     final int concurrentReads = atLeast(5);
     final int readsPerThread = atLeast(50);
@@ -545,7 +545,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
 
     iw.commit();
 
-    final DirectoryReader ir = DirectoryReader.open(dir);
+    final DirectoryReader ir = maybeWrapWithMergingReader(DirectoryReader.open(dir));
     assertTrue(ir.numDocs() > 0);
     int numDocs = 0;
     for (int i = 0; i < ir.maxDoc(); ++i) {
@@ -649,7 +649,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
     w.commit();
     w.close();
 
-    DirectoryReader reader = new DummyFilterDirectoryReader(DirectoryReader.open(dir));
+    DirectoryReader reader = new DummyFilterDirectoryReader(maybeWrapWithMergingReader(DirectoryReader.open(dir)));
 
     Directory dir2 = newDirectory();
     w = new RandomIndexWriter(random(), dir2);
|
||||||
reader.close();
|
reader.close();
|
||||||
dir.close();
|
dir.close();
|
||||||
|
|
||||||
reader = w.getReader();
|
reader = maybeWrapWithMergingReader(w.getReader());
|
||||||
for (int i = 0; i < reader.maxDoc(); ++i) {
|
for (int i = 0; i < reader.maxDoc(); ++i) {
|
||||||
final Document doc = reader.document(i);
|
final Document doc = reader.document(i);
|
||||||
final int id = doc.getField("id").numericValue().intValue();
|
final int id = doc.getField("id").numericValue().intValue();
|
||||||
|
@ -728,7 +728,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
|
||||||
}
|
}
|
||||||
iw.commit();
|
iw.commit();
|
||||||
iw.forceMerge(1); // look at what happens when big docs are merged
|
iw.forceMerge(1); // look at what happens when big docs are merged
|
||||||
final DirectoryReader rd = DirectoryReader.open(dir);
|
final DirectoryReader rd = maybeWrapWithMergingReader(DirectoryReader.open(dir));
|
||||||
final IndexSearcher searcher = new IndexSearcher(rd);
|
final IndexSearcher searcher = new IndexSearcher(rd);
|
||||||
for (int i = 0; i < numDocs; ++i) {
|
for (int i = 0; i < numDocs; ++i) {
|
||||||
final Query query = new TermQuery(new Term("id", "" + i));
|
final Query query = new TermQuery(new Term("id", "" + i));
|
||||||
|
@ -788,7 +788,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
|
||||||
iw.addDocument(doc);
|
iw.addDocument(doc);
|
||||||
}
|
}
|
||||||
|
|
||||||
DirectoryReader reader = DirectoryReader.open(iw);
|
DirectoryReader reader = maybeWrapWithMergingReader(DirectoryReader.open(iw));
|
||||||
// mix up fields explicitly
|
// mix up fields explicitly
|
||||||
if (random().nextBoolean()) {
|
if (random().nextBoolean()) {
|
||||||
reader = new MismatchedDirectoryReader(reader, random());
|
reader = new MismatchedDirectoryReader(reader, random());
|
||||||
|
|
|
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.StoredFieldsReader;
+import org.apache.lucene.util.CloseableThreadLocal;
+
+/**
+ * {@link CodecReader} wrapper that performs all reads using the merging
+ * instance of the index formats.
+ */
+public class MergingCodecReader extends FilterCodecReader {
+
+  private final CloseableThreadLocal<StoredFieldsReader> fieldsReader = new CloseableThreadLocal<StoredFieldsReader>() {
+    @Override
+    protected StoredFieldsReader initialValue() {
+      return in.getFieldsReader().getMergeInstance();
+    }
+  };
+  private final CloseableThreadLocal<NormsProducer> normsReader = new CloseableThreadLocal<NormsProducer>() {
+    @Override
+    protected NormsProducer initialValue() {
+      NormsProducer norms = in.getNormsReader();
+      if (norms == null) {
+        return null;
+      } else {
+        return norms.getMergeInstance();
+      }
+    }
+  };
+  // TODO: other formats too
+
+  /** Wrap the given instance. */
+  public MergingCodecReader(CodecReader in) {
+    super(in);
+  }
+
+  @Override
+  public StoredFieldsReader getFieldsReader() {
+    return fieldsReader.get();
+  }
+
+  @Override
+  public NormsProducer getNormsReader() {
+    return normsReader.get();
+  }
+
+  @Override
+  public CacheHelper getCoreCacheHelper() {
+    // same content, we can delegate
+    return in.getCoreCacheHelper();
+  }
+
+  @Override
+  public CacheHelper getReaderCacheHelper() {
+    // same content, we can delegate
+    return in.getReaderCacheHelper();
+  }
+
+}
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.index;
+
+import java.io.IOException;
+
+/**
+ * {@link DirectoryReader} wrapper that uses the merge instances of the wrapped
+ * {@link CodecReader}s.
+ * NOTE: This class will fail to work if the leaves of the wrapped directory are
+ * not codec readers.
+ */
+public final class MergingDirectoryReaderWrapper extends FilterDirectoryReader {
+
+  /** Wrap the given directory. */
+  public MergingDirectoryReaderWrapper(DirectoryReader in) throws IOException {
+    super(in, new SubReaderWrapper() {
+      @Override
+      public LeafReader wrap(LeafReader reader) {
+        return new MergingCodecReader((CodecReader) reader);
+      }
+    });
+  }
+
+  @Override
+  protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
+    return new MergingDirectoryReaderWrapper(in);
+  }
+
+  @Override
+  public CacheHelper getReaderCacheHelper() {
+    // doesn't change the content: can delegate
+    return in.getReaderCacheHelper();
+  }
+
+}
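Together the two new classes give tests a cheap way to exercise merge-instance code paths without running an actual merge; a usage sketch, assuming every leaf of the wrapped reader is a CodecReader as the NOTE above requires:

    DirectoryReader reader = DirectoryReader.open(dir);
    DirectoryReader merging = new MergingDirectoryReaderWrapper(reader);
    // reads against `merging` now go through getMergeInstance() on each leaf

LuceneTestCase.wrapReader also applies these wrappers randomly, as the final hunks below show.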
@@ -1669,7 +1669,7 @@ public abstract class LuceneTestCase extends Assert {
     Random random = random();
 
     for (int i = 0, c = random.nextInt(6)+1; i < c; i++) {
-      switch(random.nextInt(4)) {
+      switch(random.nextInt(5)) {
         case 0:
           // will create no FC insanity in atomic case, as ParallelLeafReader has own cache key:
           if (VERBOSE) {
|
@ -1722,6 +1722,25 @@ public abstract class LuceneTestCase extends Assert {
|
||||||
r = new MismatchedDirectoryReader((DirectoryReader)r, random);
|
r = new MismatchedDirectoryReader((DirectoryReader)r, random);
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
case 4:
|
||||||
|
if (VERBOSE) {
|
||||||
|
System.out.println("NOTE: LuceneTestCase.wrapReader: wrapping previous reader=" + r + " with MergingCodecReader");
|
||||||
|
}
|
||||||
|
if (r instanceof CodecReader) {
|
||||||
|
r = new MergingCodecReader((CodecReader) r);
|
||||||
|
} else if (r instanceof DirectoryReader) {
|
||||||
|
boolean allLeavesAreCodecReaders = true;
|
||||||
|
for (LeafReaderContext ctx : r.leaves()) {
|
||||||
|
if (ctx.reader() instanceof CodecReader == false) {
|
||||||
|
allLeavesAreCodecReaders = false;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (allLeavesAreCodecReaders) {
|
||||||
|
r = new MergingDirectoryReaderWrapper((DirectoryReader) r);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
default:
|
default:
|
||||||
fail("should not get here");
|
fail("should not get here");
|
||||||
}
|
}
|
||||||
|
|