mirror of https://github.com/apache/lucene.git
LUCENE-7500: Remove LeafReader.fields in favor of LeafReader.terms.
Optimized MultiFields.getTerms.
This commit is contained in:
parent c3c895548f
commit abc393dbfd
@@ -69,6 +69,11 @@ API Changes

* LUCENE-7850: Removed support for legacy numerics. (Adrien Grand)

* LUCENE-7500: Removed abstract LeafReader.fields(); instead terms(fieldName)
  has been made abstract, formerly was final. Also, MultiFields.getTerms
  was optimized to work directly instead of being implemented on getFields.
  (David Smiley)

Bug Fixes

* LUCENE-7626: IndexWriter will no longer accept broken token offsets

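The entry above boils down to one caller-visible change: per-field term access moves from LeafReader.fields().terms(field) to LeafReader.terms(field), which may return null. A minimal migration sketch in Java, assuming an already-open LeafReader; the helper class and field name are hypothetical, not part of the commit:

import java.io.IOException;

import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;

// Hypothetical helper illustrating the post-LUCENE-7500 call path.
final class TermsAccessExample {
  // Before: Terms terms = reader.fields().terms(field);
  // After:  Terms terms = reader.terms(field);  (null if the field is absent or has no terms index)
  static long countTerms(LeafReader reader, String field) throws IOException {
    Terms terms = reader.terms(field);
    if (terms == null) {
      return 0;
    }
    long count = 0;
    TermsEnum termsEnum = terms.iterator();
    for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
      count++;
    }
    return count;
  }
}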
@@ -98,12 +98,15 @@ public abstract class CodecReader extends LeafReader implements Accountable {
      throw new IndexOutOfBoundsException("docID must be >= 0 and < maxDoc=" + maxDoc() + " (got docID=" + docID + ")");
    }
  }

  @Override
  public final Fields fields() {
    return getPostingsReader();
  public final Terms terms(String field) throws IOException {
    //ensureOpen(); no; getPostingsReader calls this
    // We could check the FieldInfo IndexOptions but there's no point since
    // PostingsReader will simply return null for fields that don't exist or that have no terms index.
    return getPostingsReader().terms(field);
  }

  // returns the FieldInfo that corresponds to the given field and type, or
  // null if the field does not exist, or not indexed as the requested
  // DocValuesType.

@@ -17,14 +17,13 @@
package org.apache.lucene.index;

import org.apache.lucene.index.FilterLeafReader.FilterFields;
import java.io.IOException;

import org.apache.lucene.index.FilterLeafReader.FilterTerms;
import org.apache.lucene.index.FilterLeafReader.FilterTermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.CompiledAutomaton;

import java.io.IOException;

/**
 * The {@link ExitableDirectoryReader} wraps a real index {@link DirectoryReader} and

@@ -79,14 +78,12 @@ public class ExitableDirectoryReader extends FilterDirectoryReader {
    }

    @Override
    public Fields fields() throws IOException {
      Fields fields = super.fields();
      if (queryTimeout.isTimeoutEnabled()) {
        return new ExitableFields(fields, queryTimeout);
      }
      else {
        return fields; // break out of wrapper as soon as possible
    public Terms terms(String field) throws IOException {
      Terms terms = in.terms(field);
      if (terms == null) {
        return null;
      }
      return (queryTimeout.isTimeoutEnabled()) ? new ExitableTerms(terms, queryTimeout) : terms;
    }

    // this impl does not change deletes or data so we can delegate the

@@ -103,29 +100,6 @@ public class ExitableDirectoryReader extends FilterDirectoryReader {

  }

  /**
   * Wrapper class for another Fields implementation that is used by the ExitableFilterAtomicReader.
   */
  public static class ExitableFields extends FilterFields {

    private QueryTimeout queryTimeout;

    /** Constructor **/
    public ExitableFields(Fields fields, QueryTimeout queryTimeout) {
      super(fields);
      this.queryTimeout = queryTimeout;
    }

    @Override
    public Terms terms(String field) throws IOException {
      Terms terms = in.terms(field);
      if (terms == null) {
        return null;
      }
      return new ExitableTerms(terms, queryTimeout);
    }
  }

  /**
   * Wrapper class for another Terms implementation that is used by ExitableFields.
   */
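For context, ExitableDirectoryReader is the timeout-enforcing wrapper whose per-field path changes above. A hedged usage sketch, assuming the index Directory already exists; QueryTimeoutImpl is the stock millisecond-budget implementation:

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.ExitableDirectoryReader;
import org.apache.lucene.index.QueryTimeoutImpl;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;

// Hypothetical helper: searches through this reader abort once the time budget elapses.
final class TimeBoxedSearcherExample {
  static IndexSearcher open(Directory directory, long budgetMillis) throws IOException {
    DirectoryReader reader = DirectoryReader.open(directory);
    DirectoryReader exitable = new ExitableDirectoryReader(reader, new QueryTimeoutImpl(budgetMillis));
    // Term enumeration via the wrapped ExitableTerms throws ExitingReaderException when time runs out.
    return new IndexSearcher(exitable);
  }
}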
@@ -20,9 +20,15 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.Iterator;

/** Flex API for access to fields and terms
 *  @lucene.experimental */
import org.apache.lucene.codecs.FieldsProducer;

/**
 * Provides a {@link Terms} index for fields that have it, and lists which fields do.
 * This is primarily an internal/experimental API (see {@link FieldsProducer}),
 * although it is also used to expose the set of term vectors per document.
 *
 * @lucene.experimental
 */
public abstract class Fields implements Iterable<String> {

  /** Sole constructor. (For invocation by subclass
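The reworded javadoc narrows Fields to an internal/codec concern plus one public use: per-document term vectors. A short sketch of that remaining use, assuming the document actually stored term vectors (reader and docID are placeholders):

import java.io.IOException;

import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;

// Hypothetical helper: lists the fields that carry term vectors for one document.
final class TermVectorFieldsExample {
  static void printTermVectorFields(IndexReader reader, int docID) throws IOException {
    Fields vectors = reader.getTermVectors(docID);  // null if the document has no term vectors
    if (vectors == null) {
      return;
    }
    for (String field : vectors) {                  // Fields is Iterable<String>
      Terms terms = vectors.terms(field);
      System.out.println(field + ": " + terms.size() + " terms");
    }
  }
}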
@@ -345,11 +345,11 @@ public abstract class FilterLeafReader extends LeafReader {
  protected void doClose() throws IOException {
    in.close();
  }

  @Override
  public Fields fields() throws IOException {
  public Terms terms(String field) throws IOException {
    ensureOpen();
    return in.fields();
    return in.terms(field);
  }

  @Override
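With fields() gone from FilterLeafReader, filtering readers intercept Terms per field instead of wrapping a whole Fields. A minimal subclass sketch under the new contract; the class name and logging behavior are illustrative only:

import java.io.IOException;

import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.Terms;

// Hypothetical wrapper that observes every per-field Terms lookup.
class LoggingLeafReader extends FilterLeafReader {
  LoggingLeafReader(LeafReader in) {
    super(in);
  }

  @Override
  public Terms terms(String field) throws IOException {
    Terms terms = in.terms(field);   // may be null for absent or non-indexed fields
    System.out.println("terms(" + field + ") -> " + (terms == null ? "null" : "present"));
    return terms;
  }

  @Override
  public CacheHelper getCoreCacheHelper() {
    return in.getCoreCacheHelper();  // delegate caching keys to the wrapped reader
  }

  @Override
  public CacheHelper getReaderCacheHelper() {
    return in.getReaderCacheHelper();
  }
}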
@@ -18,7 +18,6 @@ package org.apache.lucene.index;

import java.io.IOException;

import org.apache.lucene.index.IndexReader.CacheHelper;
import org.apache.lucene.util.Bits;

/** {@code LeafReader} is an abstract class, providing an interface for accessing an

@@ -60,7 +59,7 @@ public abstract class LeafReader extends IndexReader {
  }

  /**
   * Optional method: Return a {@link CacheHelper} that can be used to cache
   * Optional method: Return a {@link IndexReader.CacheHelper} that can be used to cache
   * based on the content of this leaf regardless of deletions. Two readers
   * that have the same data but different sets of deleted documents or doc
   * values updates may be considered equal. Consider using

@@ -73,12 +72,6 @@ public abstract class LeafReader extends IndexReader {
   */
  public abstract CacheHelper getCoreCacheHelper();

  /**
   * Returns {@link Fields} for this reader.
   * This method will not return null.
   */
  public abstract Fields fields() throws IOException;

  @Override
  public final int docFreq(Term term) throws IOException {
    final Terms terms = terms(term.field());

@@ -139,10 +132,8 @@ public abstract class LeafReader extends IndexReader {
    return terms.getSumTotalTermFreq();
  }

  /** This may return null if the field does not exist.*/
  public final Terms terms(String field) throws IOException {
    return fields().terms(field);
  }
  /** Returns the {@link Terms} index for this field, or null if it has none. */
  public abstract Terms terms(String field) throws IOException;

  /** Returns {@link PostingsEnum} for the specified term.
   *  This will return null if either the field or
@@ -70,8 +70,11 @@ class MergeReaderWrapper extends LeafReader {
  }

  @Override
  public Fields fields() throws IOException {
    return fields;
  public Terms terms(String field) throws IOException {
    ensureOpen();
    // We could check the FieldInfo IndexOptions but there's no point since
    // PostingsReader will simply return null for fields that don't exist or that have no terms index.
    return fields.terms(field);
  }

  @Override
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

@@ -31,11 +32,12 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.MergedIterator;

/**
 * Exposes flex API, merged from flex API of sub-segments.
 * Provides a single {@link Fields} term index view over an
 * {@link IndexReader}.
 * This is useful when you're interacting with an {@link
 * IndexReader} implementation that consists of sequential
 * sub-readers (eg {@link DirectoryReader} or {@link
 * MultiReader}).
 * MultiReader}) and you must treat it as a {@link LeafReader}.
 *
 * <p><b>NOTE</b>: for composite readers, you'll get better
 * performance by gathering the sub readers using

@@ -45,7 +47,6 @@ import org.apache.lucene.util.MergedIterator;
 *
 * @lucene.experimental
 */

public final class MultiFields extends Fields {
  private final Fields[] subs;
  private final ReaderSlice[] subSlices;

@@ -64,13 +65,13 @@ public final class MultiFields extends Fields {
    switch (leaves.size()) {
      case 1:
        // already an atomic reader / reader with one leaf
        return leaves.get(0).reader().fields();
        return new LeafReaderFields(leaves.get(0).reader());
      default:
        final List<Fields> fields = new ArrayList<>(leaves.size());
        final List<ReaderSlice> slices = new ArrayList<>(leaves.size());
        for (final LeafReaderContext ctx : leaves) {
          final LeafReader r = ctx.reader();
          final Fields f = r.fields();
          final Fields f = new LeafReaderFields(r);
          fields.add(f);
          slices.add(new ReaderSlice(ctx.docBase, r.maxDoc(), fields.size()-1));
        }

@@ -115,9 +116,31 @@ public final class MultiFields extends Fields {
    }
  }

  /** This method may return null if the field does not exist.*/
  /** This method may return null if the field does not exist or if it has no terms. */
  public static Terms getTerms(IndexReader r, String field) throws IOException {
    return getFields(r).terms(field);
    final List<LeafReaderContext> leaves = r.leaves();
    if (leaves.size() == 1) {
      return leaves.get(0).reader().terms(field);
    }

    final List<Terms> termsPerLeaf = new ArrayList<>(leaves.size());
    final List<ReaderSlice> slicePerLeaf = new ArrayList<>(leaves.size());

    for (int leafIdx = 0; leafIdx < leaves.size(); leafIdx++) {
      LeafReaderContext ctx = leaves.get(leafIdx);
      Terms subTerms = ctx.reader().terms(field);
      if (subTerms != null) {
        termsPerLeaf.add(subTerms);
        slicePerLeaf.add(new ReaderSlice(ctx.docBase, r.maxDoc(), leafIdx - 1));
      }
    }

    if (termsPerLeaf.size() == 0) {
      return null;
    } else {
      return new MultiTerms(termsPerLeaf.toArray(Terms.EMPTY_ARRAY),
          slicePerLeaf.toArray(ReaderSlice.EMPTY_ARRAY));
    }
  }

  /** Returns {@link PostingsEnum} for the specified field and

@@ -264,5 +287,37 @@ public final class MultiFields extends Fields {
    }
    return fields;
  }

  private static class LeafReaderFields extends Fields {

    private final LeafReader leafReader;
    private final List<String> indexedFields;

    LeafReaderFields(LeafReader leafReader) {
      this.leafReader = leafReader;
      this.indexedFields = new ArrayList<>();
      for (FieldInfo fieldInfo : leafReader.getFieldInfos()) {
        if (fieldInfo.getIndexOptions() != IndexOptions.NONE) {
          indexedFields.add(fieldInfo.name);
        }
      }
      Collections.sort(indexedFields);
    }

    @Override
    public Iterator<String> iterator() {
      return Collections.unmodifiableList(indexedFields).iterator();
    }

    @Override
    public int size() {
      return indexedFields.size();
    }

    @Override
    public Terms terms(String field) throws IOException {
      return leafReader.terms(field);
    }
  }
}
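The rewritten getTerms above gathers each leaf's Terms for the one requested field and wraps them in a MultiTerms, instead of first materializing a merged Fields over every field. A caller-side sketch, assuming a multi-segment index in a Directory and a field named "title" (both hypothetical):

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.Directory;

// Hypothetical helper: merged distinct-term count over all segments for one field.
final class MergedTermsExample {
  static long distinctTerms(Directory directory) throws IOException {
    try (DirectoryReader reader = DirectoryReader.open(directory)) {
      Terms terms = MultiFields.getTerms(reader, "title");  // merged view, or null if no leaf has the field
      if (terms == null) {
        return 0;
      }
      long count = 0;
      TermsEnum termsEnum = terms.iterator();
      while (termsEnum.next() != null) {
        count++;
      }
      return count;
    }
  }
}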
@@ -19,6 +19,7 @@ package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.Map;

@@ -50,7 +51,6 @@ import org.apache.lucene.util.Version;
 */
public class ParallelLeafReader extends LeafReader {
  private final FieldInfos fieldInfos;
  private final ParallelFields fields = new ParallelFields();
  private final LeafReader[] parallelReaders, storedFieldsReaders;
  private final Set<LeafReader> completeReaderSet =
    Collections.newSetFromMap(new IdentityHashMap<LeafReader,Boolean>());

@@ -58,9 +58,10 @@ public class ParallelLeafReader extends LeafReader {
  private final int maxDoc, numDocs;
  private final boolean hasDeletions;
  private final LeafMetaData metaData;
  private final SortedMap<String,LeafReader> fieldToReader = new TreeMap<>();
  private final SortedMap<String,LeafReader> tvFieldToReader = new TreeMap<>();

  private final SortedMap<String,LeafReader> fieldToReader = new TreeMap<>();//TODO needn't sort?
  private final Map<String,LeafReader> termsFieldToReader = new HashMap<>();

  /** Create a ParallelLeafReader based on the provided
   *  readers; auto-closes the given readers on {@link #close()}. */
  public ParallelLeafReader(LeafReader... readers) throws IOException {

@@ -130,9 +131,15 @@ public class ParallelLeafReader extends LeafReader {
        if (!fieldToReader.containsKey(fieldInfo.name)) {
          builder.add(fieldInfo);
          fieldToReader.put(fieldInfo.name, reader);
          // only add these if the reader responsible for that field name is the current:
          // TODO consider populating 1st leaf with vectors even if the field name has been seen on a previous leaf
          if (fieldInfo.hasVectors()) {
            tvFieldToReader.put(fieldInfo.name, reader);
          }
          // TODO consider populating 1st leaf with terms even if the field name has been seen on a previous leaf
          if (fieldInfo.getIndexOptions() != IndexOptions.NONE) {
            termsFieldToReader.put(fieldInfo.name, reader);
          }
        }
      }
    }

@@ -154,17 +161,6 @@ public class ParallelLeafReader extends LeafReader {

    fieldInfos = builder.finish();
    this.metaData = new LeafMetaData(createdVersionMajor, minVersion, indexSort);

    // build Fields instance
    for (final LeafReader reader : this.parallelReaders) {
      final Fields readerFields = reader.fields();
      for (String field : readerFields) {
        // only add if the reader responsible for that field name is the current:
        if (fieldToReader.get(field) == reader) {
          this.fields.addField(field, readerFields.terms(field));
        }
      }
    }

    // do this finally so any Exceptions occurred before don't affect refcounts:
    for (LeafReader reader : completeReaderSet) {

@@ -230,13 +226,14 @@ public class ParallelLeafReader extends LeafReader {
    ensureOpen();
    return hasDeletions ? parallelReaders[0].getLiveDocs() : null;
  }

  @Override
  public Fields fields() {
  public Terms terms(String field) throws IOException {
    ensureOpen();
    return fields;
    LeafReader leafReader = termsFieldToReader.get(field);
    return leafReader == null ? null : leafReader.terms(field);
  }

  @Override
  public int numDocs() {
    // Don't call ensureOpen() here (it could affect performance)
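ParallelLeafReader now answers terms(field) by routing to whichever wrapped reader owns that field, via the termsFieldToReader map built above. A hedged composition sketch, assuming two leaf readers with disjoint fields and identical docIDs (variable and field names are illustrative):

import java.io.IOException;

import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.ParallelLeafReader;
import org.apache.lucene.index.Terms;

// Hypothetical composition: leafA holds "body", leafB holds "price"; both must align document-for-document.
final class ParallelTermsExample {
  static Terms bodyTerms(LeafReader leafA, LeafReader leafB) throws IOException {
    ParallelLeafReader parallel = new ParallelLeafReader(leafA, leafB);
    // Delegates to the reader that first declared the "body" field.
    return parallel.terms("body");
  }
}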
@@ -18,6 +18,8 @@ package org.apache.lucene.index;


import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;

import org.apache.lucene.codecs.DocValuesProducer;

@@ -284,21 +286,27 @@ public final class SlowCodecReaderWrapper {
  }

  private static FieldsProducer readerToFieldsProducer(final LeafReader reader) throws IOException {
    final Fields fields = reader.fields();
    ArrayList<String> indexedFields = new ArrayList<>();
    for (FieldInfo fieldInfo : reader.getFieldInfos()) {
      if (fieldInfo.getIndexOptions() != IndexOptions.NONE) {
        indexedFields.add(fieldInfo.name);
      }
    }
    Collections.sort(indexedFields);
    return new FieldsProducer() {
      @Override
      public Iterator<String> iterator() {
        return fields.iterator();
        return indexedFields.iterator();
      }

      @Override
      public Terms terms(String field) throws IOException {
        return fields.terms(field);
        return reader.terms(field);
      }

      @Override
      public int size() {
        return fields.size();
        return indexedFields.size();
      }

      @Override
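The replacement for "enumerate reader.fields()" used here, and in LeafReaderFields above, is to derive the indexed field names from FieldInfos. A standalone sketch of that idiom for application code (the utility name is hypothetical):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReader;

// Hypothetical utility: sorted names of all fields that have a terms index.
final class IndexedFieldNames {
  static List<String> of(LeafReader reader) {
    List<String> names = new ArrayList<>();
    for (FieldInfo fieldInfo : reader.getFieldInfos()) {
      if (fieldInfo.getIndexOptions() != IndexOptions.NONE) {  // skip fields that are not inverted
        names.add(fieldInfo.name);
      }
    }
    Collections.sort(names);
    return names;
  }
}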
@@ -49,6 +49,7 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
 */
class SortingLeafReader extends FilterLeafReader {

  //TODO remove from here; move to FreqProxTermsWriter or FreqProxFields?
  static class SortingFields extends FilterFields {

    private final Sorter.DocMap docMap;

@@ -1042,8 +1043,9 @@ class SortingLeafReader extends FilterLeafReader {
  }

  @Override
  public Fields fields() throws IOException {
    return new SortingFields(in.fields(), in.getFieldInfos(), docMap);
  public Terms terms(String field) throws IOException {
    Terms terms = super.terms(field);
    return terms==null ? null : new SortingTerms(terms, in.getFieldInfos().fieldInfo(field).getIndexOptions(), docMap);
  }

  @Override
@@ -37,17 +37,6 @@ import org.junit.Ignore;
public class TestExitableDirectoryReader extends LuceneTestCase {
  private static class TestReader extends FilterLeafReader {

    private static class TestFields extends FilterFields {
      TestFields(Fields in) {
        super(in);
      }

      @Override
      public Terms terms(String field) throws IOException {
        return new TestTerms(super.terms(field));
      }
    }

    private static class TestTerms extends FilterTerms {
      TestTerms(Terms in) {
        super(in);

@@ -83,8 +72,9 @@ public class TestExitableDirectoryReader extends LuceneTestCase {
    }

    @Override
    public Fields fields() throws IOException {
      return new TestFields(super.fields());
    public Terms terms(String field) throws IOException {
      Terms terms = super.terms(field);
      return terms==null ? null : new TestTerms(terms);
    }

    @Override
@@ -35,18 +35,6 @@ public class TestFilterLeafReader extends LuceneTestCase {

  private static class TestReader extends FilterLeafReader {

    /** Filter that only permits terms containing 'e'.*/
    private static class TestFields extends FilterFields {
      TestFields(Fields in) {
        super(in);
      }

      @Override
      public Terms terms(String field) throws IOException {
        return new TestTerms(super.terms(field));
      }
    }

    private static class TestTerms extends FilterTerms {
      TestTerms(Terms in) {
        super(in);

@@ -103,8 +91,9 @@ public class TestFilterLeafReader extends LuceneTestCase {
    }

    @Override
    public Fields fields() throws IOException {
      return new TestFields(super.fields());
    public Terms terms(String field) throws IOException {
      Terms terms = super.terms(field);
      return terms==null ? null : new TestTerms(terms);
    }

    @Override
@@ -18,7 +18,6 @@ package org.apache.lucene.index;


import java.io.IOException;
import java.util.Iterator;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;

@@ -90,21 +89,10 @@ public class TestParallelTermEnum extends LuceneTestCase {
  public void test1() throws IOException {
    ParallelLeafReader pr = new ParallelLeafReader(ir1, ir2);

    Fields fields = pr.fields();
    Iterator<String> fe = fields.iterator();
    assertEquals(3, pr.getFieldInfos().size());

    String f = fe.next();
    assertEquals("field1", f);
    checkTerms(fields.terms(f), "brown", "fox", "jumps", "quick", "the");

    f = fe.next();
    assertEquals("field2", f);
    checkTerms(fields.terms(f), "brown", "fox", "jumps", "quick", "the");

    f = fe.next();
    assertEquals("field3", f);
    checkTerms(fields.terms(f), "dog", "fox", "jumps", "lazy", "over", "the");

    assertFalse(fe.hasNext());
    checkTerms(pr.terms("field1"), "brown", "fox", "jumps", "quick", "the");
    checkTerms(pr.terms("field2"), "brown", "fox", "jumps", "quick", "the");
    checkTerms(pr.terms("field3"), "dog", "fox", "jumps", "lazy", "over", "the");
  }
}
@@ -29,7 +29,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexReader;

@@ -219,21 +218,16 @@ public class TermInSetQueryTest extends LuceneTestCase {
    }

    @Override
    public Fields fields() throws IOException {
      return new FilterFields(in.fields()) {
        public Terms terms(String field) throws IOException {
          Terms terms = super.terms(field);
          if (terms == null) {
            return null;
          }
          return new FilterTerms(terms) {
            @Override
    public Terms terms(String field) throws IOException {
      final Terms in = this.in.terms(field);
      if (in == null) {
        return null;
      }
      return new FilterTerms(in) {
        @Override
        public TermsEnum iterator() throws IOException {
          counter.incrementAndGet();
          return super.iterator();
        }
      };
        public TermsEnum iterator() throws IOException {
          counter.incrementAndGet();
          return super.iterator();
        }
      };
    }
@@ -22,7 +22,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.LeafReader;

@@ -123,31 +122,27 @@ public class TestTermQuery extends LuceneTestCase {
    }

    @Override
    public Fields fields() throws IOException {
      return new FilterFields(super.fields()) {
    public Terms terms(String field) throws IOException {
      Terms terms = super.terms(field);
      return terms==null ? null : new FilterTerms(terms) {
        @Override
        public Terms terms(String field) throws IOException {
          return new FilterTerms(super.terms(field)) {
        public TermsEnum iterator() throws IOException {
          return new FilterTermsEnum(super.iterator()) {
            @Override
            public TermsEnum iterator() throws IOException {
              return new FilterTermsEnum(super.iterator()) {
                @Override
            public SeekStatus seekCeil(BytesRef text) throws IOException {
              throw new AssertionError("no seek");
            }
            @Override
            public void seekExact(BytesRef term, TermState state) throws IOException {
              throw new AssertionError("no seek");
            }
            @Override
            public boolean seekExact(BytesRef text) throws IOException {
              throw new AssertionError("no seek");
            }
            @Override
            public void seekExact(long ord) throws IOException {
              throw new AssertionError("no seek");
            }
          };
            public SeekStatus seekCeil(BytesRef text) throws IOException {
              throw new AssertionError("no seek");
            }
            @Override
            public void seekExact(BytesRef term, TermState state) throws IOException {
              throw new AssertionError("no seek");
            }
            @Override
            public boolean seekExact(BytesRef text) throws IOException {
              throw new AssertionError("no seek");
            }
            @Override
            public void seekExact(long ord) throws IOException {
              throw new AssertionError("no seek");
            }
          };
        }
@@ -25,8 +25,8 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PointValues;

@@ -90,8 +90,8 @@ public class TermVectorLeafReader extends LeafReader {
  }

  @Override
  public Fields fields() throws IOException {
    return fields;
  public Terms terms(String field) throws IOException {
    return fields.terms(field);
  }

  @Override

@@ -148,7 +148,7 @@ public class TermVectorLeafReader extends LeafReader {
    if (docID != 0) {
      return null;
    }
    return fields();
    return fields;
  }

  @Override
@@ -18,7 +18,6 @@ package org.apache.lucene.search.highlight;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

@@ -30,7 +29,6 @@ import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;

@@ -429,30 +427,15 @@ public class WeightedSpanTermExtractor {
    DelegatingLeafReader(LeafReader in) {
      super(in);
    }

    @Override
    public FieldInfos getFieldInfos() {
      throw new UnsupportedOperationException();
      throw new UnsupportedOperationException();//TODO merge them
    }

    @Override
    public Fields fields() throws IOException {
      return new FilterFields(super.fields()) {
        @Override
        public Terms terms(String field) throws IOException {
          return super.terms(DelegatingLeafReader.FIELD_NAME);
        }

        @Override
        public Iterator<String> iterator() {
          return Collections.singletonList(DelegatingLeafReader.FIELD_NAME).iterator();
        }

        @Override
        public int size() {
          return 1;
        }
      };
    public Terms terms(String field) throws IOException {
      return super.terms(DelegatingLeafReader.FIELD_NAME);
    }

    @Override
@@ -24,7 +24,6 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;

@@ -36,7 +35,6 @@ import java.util.function.Predicate;

import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;

@@ -529,12 +527,16 @@ public class PhraseHelper {
    }
  }

  //TODO move up; it's currently in between other inner classes that are related
  /**
   * Needed to support the ability to highlight a query irrespective of the field a query refers to
   * (aka requireFieldMatch=false).
   * This reader will just delegate every call to a single field in the wrapped
   * LeafReader. This way we ensure that all queries going through this reader target the same field.
   */
   */
  static final class SingleFieldFilterLeafReader extends FilterLeafReader {
    final String fieldName;

    SingleFieldFilterLeafReader(LeafReader in, String fieldName) {
      super(in);
      this.fieldName = fieldName;

@@ -542,27 +544,12 @@ public class PhraseHelper {

    @Override
    public FieldInfos getFieldInfos() {
      throw new UnsupportedOperationException();
      throw new UnsupportedOperationException();//TODO merge them
    }

    @Override
    public Fields fields() throws IOException {
      return new FilterFields(super.fields()) {
        @Override
        public Terms terms(String field) throws IOException {
          return super.terms(fieldName);
        }

        @Override
        public Iterator<String> iterator() {
          return Collections.singletonList(fieldName).iterator();
        }

        @Override
        public int size() {
          return 1;
        }
      };
    public Terms terms(String field) throws IOException {
      return super.terms(fieldName);
    }

    @Override
@@ -18,7 +18,6 @@ package org.apache.lucene.search.uhighlight;

import java.io.IOException;

import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;

@@ -52,24 +51,9 @@ final class TermVectorFilteredLeafReader extends FilterLeafReader {
  }

  @Override
  public Fields fields() throws IOException {
    return new TermVectorFilteredFields(in.fields(), filterTerms);
  }

  private static final class TermVectorFilteredFields extends FilterLeafReader.FilterFields {
    // NOTE: super ("in") is baseFields

    private final Terms filterTerms;

    TermVectorFilteredFields(Fields baseFields, Terms filterTerms) {
      super(baseFields);
      this.filterTerms = filterTerms;
    }

    @Override
    public Terms terms(String field) throws IOException {
      return new TermsFilteredTerms(in.terms(field), filterTerms);
    }
  public Terms terms(String field) throws IOException {
    Terms terms = in.terms(field);
    return terms==null ? null : new TermsFilteredTerms(terms, filterTerms);
  }

  private static final class TermsFilteredTerms extends FilterLeafReader.FilterTerms {
@@ -1134,7 +1134,7 @@ public class MemoryIndex {
   */
  private final class MemoryIndexReader extends LeafReader {

    private Fields memoryFields = new MemoryFields(fields);
    private final MemoryFields memoryFields = new MemoryFields(fields);

    private MemoryIndexReader() {
      super(); // avoid as much superclass baggage as possible

@@ -1236,8 +1236,8 @@ public class MemoryIndex {
    }

    @Override
    public Fields fields() {
      return memoryFields;
    public Terms terms(String field) throws IOException {
      return memoryFields.terms(field);
    }

    private class MemoryFields extends Fields {

@@ -1589,7 +1589,7 @@ public class MemoryIndex {
    @Override
    public Fields getTermVectors(int docID) {
      if (docID == 0) {
        return fields();
        return memoryFields;
      } else {
        return null;
      }
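MemoryIndex's single-document reader follows the same pattern: terms(field) just delegates to the in-memory MemoryFields. A hedged sketch of the public MemoryIndex API that ends up on this path, assuming the memory and analyzers-common modules are on the classpath; the field text and query are illustrative:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.TermQuery;

// Hypothetical one-document relevance check against an in-memory index.
final class MemoryIndexExample {
  static float scoreBodyContainsLucene() {
    MemoryIndex index = new MemoryIndex();
    index.addField("body", "Lucene in-memory single document index", new StandardAnalyzer());
    // Internally this resolves terms via MemoryIndexReader.terms("body") after LUCENE-7500.
    return index.search(new TermQuery(new Term("body", "lucene")));
  }
}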
@@ -71,10 +71,11 @@ public class AssertingLeafReader extends FilterLeafReader {
  }

  @Override
  public Fields fields() throws IOException {
    return new AssertingFields(super.fields());
  public Terms terms(String field) throws IOException {
    Terms terms = super.terms(field);
    return terms == null ? null : new AssertingTerms(terms);
  }

  @Override
  public Fields getTermVectors(int docID) throws IOException {
    Fields fields = super.getTermVectors(docID);
@@ -335,7 +335,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {

    // PostingsFormat
    try (FieldsConsumer consumer = codec.postingsFormat().fieldsConsumer(writeState)) {
      consumer.write(oneDocReader.fields());
      consumer.write(MultiFields.getFields(oneDocReader));
      IOUtils.close(consumer);
      IOUtils.close(consumer);
    }
@@ -193,10 +193,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    LeafReader ar = getOnlyLeafReader(ir);
    Fields fields = ar.fields();
    int fieldCount = fields.size();
    // -1 is allowed, if the codec doesn't implement fields.size():
    assertTrue(fieldCount == 1 || fieldCount == -1);
    assertEquals(1, ar.getFieldInfos().size());
    Terms terms = ar.terms("");
    assertNotNull(terms);
    TermsEnum termsEnum = terms.iterator();

@@ -218,10 +215,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    LeafReader ar = getOnlyLeafReader(ir);
    Fields fields = ar.fields();
    int fieldCount = fields.size();
    // -1 is allowed, if the codec doesn't implement fields.size():
    assertTrue(fieldCount == 1 || fieldCount == -1);
    assertEquals(1, ar.getFieldInfos().size());
    Terms terms = ar.terms("");
    assertNotNull(terms);
    TermsEnum termsEnum = terms.iterator();

@@ -296,11 +290,10 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
    iw.forceMerge(1);
    DirectoryReader ir = iw.getReader();
    LeafReader ar = getOnlyLeafReader(ir);
    Fields fields = ar.fields();
    // Ghost busting terms dict impls will have
    // fields.size() == 0; all others must be == 1:
    assertTrue(fields.size() <= 1);
    Terms terms = fields.terms("ghostField");
    assertTrue(ar.getFieldInfos().size() <= 1);
    Terms terms = ar.terms("ghostField");
    if (terms != null) {
      TermsEnum termsEnum = terms.iterator();
      BytesRef term = termsEnum.next();
@@ -108,11 +108,10 @@ public final class FieldFilterLeafReader extends FilterLeafReader {
  }

  @Override
  public Fields fields() throws IOException {
    final Fields f = super.fields();
    return (f == null) ? null : new FieldFilterFields(f);
  public Terms terms(String field) throws IOException {
    return hasField(field) ? super.terms(field) : null;
  }

  @Override
  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
    return hasField(field) ? super.getBinaryDocValues(field) : null;

@@ -145,7 +144,7 @@ public final class FieldFilterLeafReader extends FilterLeafReader {
    if (negate) sb.append('!');
    return sb.append(fields).append(')').toString();
  }

  private class FieldFilterFields extends FilterFields {

    public FieldFilterFields(Fields in) {
@@ -17,17 +17,16 @@
package org.apache.lucene.search;

import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;

import junit.framework.Assert;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiReader;

@@ -42,8 +41,6 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;

import junit.framework.Assert;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;

@@ -172,23 +169,8 @@ public class QueryUtils {
    return new LeafReader() {

      @Override
      public Fields fields() throws IOException {
        return new Fields() {
          @Override
          public Iterator<String> iterator() {
            return Collections.<String>emptyList().iterator();
          }

          @Override
          public Terms terms(String field) throws IOException {
            return null;
          }

          @Override
          public int size() {
            return 0;
          }
        };
      public Terms terms(String field) throws IOException {
        return null;
      }

      @Override
@@ -98,9 +98,9 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
  }

  @Override
  public Fields fields() {
  public Terms terms(String field) throws IOException {
    ensureOpen();
    return fields;
    return fields.terms(field);
  }

  @Override
@@ -25,9 +25,9 @@ import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiReader;

@@ -37,6 +37,7 @@ import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BitSetIterator;

@@ -399,7 +400,7 @@ public class TestDocSet extends LuceneTestCase {
    }

    @Override
    public Fields fields() {
    public Terms terms(String field) throws IOException {
      return null;
    }