mirror of https://github.com/apache/lucene.git

commit 773ed71e5a (parent 6c634c2920)

LUCENE-6068: LeafReader.fields never returns null

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1641239 13f79535-47bb-0310-9956-ffa450edef68
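This commit tightens the LeafReader.fields() contract: a reader with no postings now exposes an empty Fields instead of null, so every call site can drop its null guard and go straight to the per-field Terms (which may still be null when the field does not exist). A minimal consumer-side sketch of the before/after pattern, against the trunk API of this commit (the reader, field, and bytes arguments are illustrative, not part of the patch):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    final class FieldsContractSketch {
      static long docFreqOrZero(LeafReader reader, String field, BytesRef bytes) throws IOException {
        // Old pattern (pre LUCENE-6068):
        //   Fields fields = reader.fields();
        //   if (fields == null) return 0;      // reader had no postings
        //   Terms terms = fields.terms(field);
        // New pattern: fields() is never null; only Terms needs a check.
        Terms terms = reader.terms(field);
        if (terms == null) {
          return 0;                             // field does not exist
        }
        TermsEnum termsEnum = terms.iterator(null); // TermsEnum reuse arg, as in this codebase
        if (termsEnum.seekExact(bytes)) {
          return termsEnum.docFreq();
        }
        return 0;
      }
    }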
@@ -223,6 +223,8 @@ API Changes
   sort/group/etc on a misconfigured field (e.g. no docvalues, no UninvertingReader, etc).
   (Robert Muir)
 
+* LUCENE-6068: LeafReader.fields() never returns null. (Robert Muir)
+
 Bug Fixes
 
 * LUCENE-5650: Enforce read-only access to any path outside the temporary
@@ -93,11 +93,9 @@ public abstract class FieldsConsumer implements Closeable {
       final FieldsProducer f = mergeState.fieldsProducers[readerIndex];
 
       final int maxDoc = mergeState.maxDocs[readerIndex];
-      if (f != null) {
-        f.checkIntegrity();
-        slices.add(new ReaderSlice(docBase, maxDoc, readerIndex));
-        fields.add(f);
-      }
+      f.checkIntegrity();
+      slices.add(new ReaderSlice(docBase, maxDoc, readerIndex));
+      fields.add(f);
       docBase += maxDoc;
     }
 
@@ -378,10 +378,6 @@ class BufferedUpdatesStream implements Accountable {
   private synchronized long applyTermDeletes(Iterable<Term> termsIter, ReadersAndUpdates rld, SegmentReader reader) throws IOException {
     long delCount = 0;
     Fields fields = reader.fields();
-    if (fields == null) {
-      // This reader has no postings
-      return 0;
-    }
 
     TermsEnum termsEnum = null;
 
@@ -451,10 +447,6 @@ class BufferedUpdatesStream implements Accountable {
   private synchronized void applyDocValuesUpdates(Iterable<? extends DocValuesUpdate> updates,
       ReadersAndUpdates rld, SegmentReader reader, DocValuesFieldUpdates.Container dvUpdatesContainer) throws IOException {
     Fields fields = reader.fields();
-    if (fields == null) {
-      // This reader has no postings
-      return;
-    }
 
     // TODO: we can process the updates per DV field, from last to first so that
     // if multiple terms affect same document for the same field, we add an update
@@ -882,11 +882,6 @@ public class CheckIndex implements Closeable {
     final Status.TermIndexStatus status = new Status.TermIndexStatus();
     int computedFieldCount = 0;
 
-    if (fields == null) {
-      msg(infoStream, "OK [no fields/terms]");
-      return status;
-    }
-
     DocsEnum docs = null;
     DocsEnum docsAndFreqs = null;
     DocsAndPositionsEnum postings = null;
@@ -80,11 +80,7 @@ public class ExitableDirectoryReader extends FilterDirectoryReader {
 
     @Override
    public Fields fields() throws IOException {
-      Fields fields = super.fields();
-      if (fields == null) {
-        return null;
-      }
-      return new ExitableFields(fields, queryTimeout);
+      return new ExitableFields(super.fields(), queryTimeout);
    }
 
    @Override
@@ -136,18 +136,13 @@ public abstract class LeafReader extends IndexReader {
 
   /**
    * Returns {@link Fields} for this reader.
-   * This method may return null if the reader has no
-   * postings.
+   * This method will not return null.
    */
   public abstract Fields fields() throws IOException;
 
   @Override
   public final int docFreq(Term term) throws IOException {
-    final Fields fields = fields();
-    if (fields == null) {
-      return 0;
-    }
-    final Terms terms = fields.terms(term.field());
+    final Terms terms = terms(term.field());
     if (terms == null) {
       return 0;
     }
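Under the tightened contract above, an implementation that has no postings can no longer return null from fields(); presumably it hands back an empty Fields. The degenerate implementation below is a hypothetical illustration of that contract, not code from this patch:

    // Hypothetical empty Fields under the "never null" contract:
    // zero field names to iterate, no Terms for any field.
    Fields emptyFields = new Fields() {
      @Override
      public java.util.Iterator<String> iterator() {
        return java.util.Collections.<String>emptyIterator();
      }
      @Override
      public Terms terms(String field) {
        return null; // field does not exist
      }
      @Override
      public int size() {
        return 0;
      }
    };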
@@ -166,11 +161,7 @@ public abstract class LeafReader extends IndexReader {
    *  away. */
   @Override
   public final long totalTermFreq(Term term) throws IOException {
-    final Fields fields = fields();
-    if (fields == null) {
-      return 0;
-    }
-    final Terms terms = fields.terms(term.field());
+    final Terms terms = terms(term.field());
     if (terms == null) {
       return 0;
     }
@@ -211,11 +202,7 @@ public abstract class LeafReader extends IndexReader {
 
   /** This may return null if the field does not exist.*/
   public final Terms terms(String field) throws IOException {
-    final Fields fields = fields();
-    if (fields == null) {
-      return null;
-    }
-    return fields.terms(field);
+    return fields().terms(field);
   }
 
   /** Returns {@link DocsEnum} for the specified term.
@@ -225,14 +212,11 @@ public abstract class LeafReader extends IndexReader {
   public final DocsEnum termDocsEnum(Term term) throws IOException {
     assert term.field() != null;
     assert term.bytes() != null;
-    final Fields fields = fields();
-    if (fields != null) {
-      final Terms terms = fields.terms(term.field());
-      if (terms != null) {
-        final TermsEnum termsEnum = terms.iterator(null);
-        if (termsEnum.seekExact(term.bytes())) {
-          return termsEnum.docs(getLiveDocs(), null);
-        }
+    final Terms terms = terms(term.field());
+    if (terms != null) {
+      final TermsEnum termsEnum = terms.iterator(null);
+      if (termsEnum.seekExact(term.bytes())) {
+        return termsEnum.docs(getLiveDocs(), null);
       }
     }
     return null;
@@ -245,14 +229,11 @@ public abstract class LeafReader extends IndexReader {
   public final DocsAndPositionsEnum termPositionsEnum(Term term) throws IOException {
     assert term.field() != null;
     assert term.bytes() != null;
-    final Fields fields = fields();
-    if (fields != null) {
-      final Terms terms = fields.terms(term.field());
-      if (terms != null) {
-        final TermsEnum termsEnum = terms.iterator(null);
-        if (termsEnum.seekExact(term.bytes())) {
-          return termsEnum.docsAndPositions(getLiveDocs(), null);
-        }
+    final Terms terms = terms(term.field());
+    if (terms != null) {
+      final TermsEnum termsEnum = terms.iterator(null);
+      if (termsEnum.seekExact(term.bytes())) {
+        return termsEnum.docsAndPositions(getLiveDocs(), null);
       }
     }
     return null;
@@ -129,10 +129,7 @@ public class MergeState {
         if (termVectorsReader != null) {
           termVectorsReader = termVectorsReader.getMergeInstance();
         }
-        fieldsProducer = segmentReader.fields();
-        if (fieldsProducer != null) {
-          fieldsProducer = fieldsProducer.getMergeInstance();
-        }
+        fieldsProducer = segmentReader.fields().getMergeInstance();
       } else {
         // A "foreign" reader
         normsProducer = readerToNormsProducer(reader);
@@ -62,9 +62,6 @@ public final class MultiFields extends Fields {
   public static Fields getFields(IndexReader reader) throws IOException {
     final List<LeafReaderContext> leaves = reader.leaves();
     switch (leaves.size()) {
-      case 0:
-        // no fields
-        return null;
       case 1:
         // already an atomic reader / reader with one leave
        return leaves.get(0).reader().fields();
@@ -74,14 +71,10 @@ public final class MultiFields extends Fields {
         for (final LeafReaderContext ctx : leaves) {
           final LeafReader r = ctx.reader();
           final Fields f = r.fields();
-          if (f != null) {
-            fields.add(f);
-            slices.add(new ReaderSlice(ctx.docBase, r.maxDoc(), fields.size()-1));
-          }
+          fields.add(f);
+          slices.add(new ReaderSlice(ctx.docBase, r.maxDoc(), fields.size()-1));
         }
-        if (fields.isEmpty()) {
-          return null;
-        } else if (fields.size() == 1) {
+        if (fields.size() == 1) {
           return fields.get(0);
         } else {
           return new MultiFields(fields.toArray(Fields.EMPTY_ARRAY),
@@ -124,12 +117,7 @@ public final class MultiFields extends Fields {
 
   /** This method may return null if the field does not exist.*/
   public static Terms getTerms(IndexReader r, String field) throws IOException {
-    final Fields fields = getFields(r);
-    if (fields == null) {
-      return null;
-    } else {
-      return fields.terms(field);
-    }
+    return getFields(r).terms(field);
   }
 
   /** Returns {@link DocsEnum} for the specified field &
@@ -120,12 +120,10 @@ public class ParallelLeafReader extends LeafReader {
     // build Fields instance
     for (final LeafReader reader : this.parallelReaders) {
       final Fields readerFields = reader.fields();
-      if (readerFields != null) {
-        for (String field : readerFields) {
-          // only add if the reader responsible for that field name is the current:
-          if (fieldToReader.get(field) == reader) {
-            this.fields.addField(field, readerFields.terms(field));
-          }
+      for (String field : readerFields) {
+        // only add if the reader responsible for that field name is the current:
+        if (fieldToReader.get(field) == reader) {
+          this.fields.addField(field, readerFields.terms(field));
         }
       }
     }
@@ -503,9 +503,7 @@ public final class SegmentReader extends LeafReader implements Accountable {
   public Iterable<? extends Accountable> getChildResources() {
     ensureOpen();
     List<Accountable> resources = new ArrayList<>();
-    if (core.fields != null) {
-      resources.add(Accountables.namedAccountable("postings", core.fields));
-    }
+    resources.add(Accountables.namedAccountable("postings", core.fields));
     if (core.normsProducer != null) {
       resources.add(Accountables.namedAccountable("norms", core.normsProducer));
     }
@@ -87,16 +87,13 @@ public final class TermContext {
     //if (DEBUG) System.out.println("prts.build term=" + term);
     for (final LeafReaderContext ctx : context.leaves()) {
       //if (DEBUG) System.out.println("  r=" + leaves[i].reader);
-      final Fields fields = ctx.reader().fields();
-      if (fields != null) {
-        final Terms terms = fields.terms(field);
-        if (terms != null) {
-          final TermsEnum termsEnum = terms.iterator(null);
-          if (termsEnum.seekExact(bytes)) {
-            final TermState termState = termsEnum.termState();
-            //if (DEBUG) System.out.println("    found");
-            perReaderTermState.register(termState, ctx.ord, termsEnum.docFreq(), termsEnum.totalTermFreq());
-          }
+      final Terms terms = ctx.reader().terms(field);
+      if (terms != null) {
+        final TermsEnum termsEnum = terms.iterator(null);
+        if (termsEnum.seekExact(bytes)) {
+          final TermState termState = termsEnum.termState();
+          //if (DEBUG) System.out.println("    found");
+          perReaderTermState.register(termState, ctx.ord, termsEnum.docFreq(), termsEnum.totalTermFreq());
         }
       }
     }
@@ -84,14 +84,7 @@ public class MultiTermQueryWrapperFilter<Q extends MultiTermQuery> extends Filter {
    */
   @Override
   public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
-    final LeafReader reader = context.reader();
-    final Fields fields = reader.fields();
-    if (fields == null) {
-      // reader has no fields
-      return null;
-    }
-
-    final Terms terms = fields.terms(query.field);
+    final Terms terms = context.reader().terms(query.field);
     if (terms == null) {
       // field does not exist
       return null;
@@ -47,13 +47,7 @@ abstract class TermCollectingRewrite<Q extends Query> extends MultiTermQuery.RewriteMethod {
   final void collectTerms(IndexReader reader, MultiTermQuery query, TermCollector collector) throws IOException {
     IndexReaderContext topReaderContext = reader.getContext();
     for (LeafReaderContext context : topReaderContext.leaves()) {
-      final Fields fields = context.reader().fields();
-      if (fields == null) {
-        // reader has no fields
-        continue;
-      }
-
-      final Terms terms = fields.terms(query.field);
+      final Terms terms = context.reader().terms(query.field);
       if (terms == null) {
         // field does not exist
         continue;
@@ -93,16 +93,11 @@ public class SpanTermQuery extends SpanQuery {
       if (termContext == null) {
         // this happens with span-not query, as it doesn't include the NOT side in extractTerms()
        // so we seek to the term now in this segment..., this sucks because its ugly mostly!
-        final Fields fields = context.reader().fields();
-        if (fields != null) {
-          final Terms terms = fields.terms(term.field());
-          if (terms != null) {
-            final TermsEnum termsEnum = terms.iterator(null);
-            if (termsEnum.seekExact(term.bytes())) {
-              state = termsEnum.termState();
-            } else {
-              state = null;
-            }
+        final Terms terms = context.reader().terms(term.field());
+        if (terms != null) {
+          final TermsEnum termsEnum = terms.iterator(null);
+          if (termsEnum.seekExact(term.bytes())) {
+            state = termsEnum.termState();
          } else {
            state = null;
          }
@@ -58,9 +58,6 @@ public class TestDocCount extends LuceneTestCase {
 
   private void verifyCount(IndexReader ir) throws Exception {
     Fields fields = MultiFields.getFields(ir);
-    if (fields == null) {
-      return;
-    }
     for (String field : fields) {
       Terms terms = fields.terms(field);
       if (terms == null) {
@@ -330,7 +330,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
     final Bits liveDocs2 = MultiFields.getLiveDocs(r2);
 
     Fields fields = MultiFields.getFields(r2);
-    if (fields == null) {
+    if (fields.size() == 0) {
       // make sure r1 is in fact empty (eg has only all
       // deleted docs):
       Bits liveDocs = MultiFields.getLiveDocs(r1);
@@ -770,12 +770,7 @@ public class SortingLeafReader extends FilterLeafReader {
 
   @Override
   public Fields fields() throws IOException {
-    Fields fields = in.fields();
-    if (fields == null) {
-      return null;
-    } else {
-      return new SortingFields(fields, in.getFieldInfos(), docMap);
-    }
+    return new SortingFields(in.fields(), in.getFieldInfos(), docMap);
   }
 
   @Override
@@ -96,19 +96,17 @@ public class HighFreqTerms {
     TermStatsQueue tiq = null;
 
     if (field != null) {
-      Fields fields = MultiFields.getFields(reader);
-      if (fields == null) {
+      Terms terms = MultiFields.getTerms(reader, field);
+      if (terms == null) {
         throw new RuntimeException("field " + field + " not found");
       }
-      Terms terms = fields.terms(field);
-      if (terms != null) {
-        TermsEnum termsEnum = terms.iterator(null);
-        tiq = new TermStatsQueue(numTerms, comparator);
-        tiq.fill(field, termsEnum);
-      }
+
+      TermsEnum termsEnum = terms.iterator(null);
+      tiq = new TermStatsQueue(numTerms, comparator);
+      tiq.fill(field, termsEnum);
     } else {
       Fields fields = MultiFields.getFields(reader);
-      if (fields == null) {
+      if (fields.size() == 0) {
        throw new RuntimeException("no fields found for this index");
       }
       tiq = new TermStatsQueue(numTerms, comparator);
@@ -28,7 +28,6 @@ import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Terms;
@@ -234,11 +233,7 @@ public class DocTermOrds implements Accountable {
   public TermsEnum getOrdTermsEnum(LeafReader reader) throws IOException {
     if (indexedTermsArray == null) {
       //System.out.println("GET normal enum");
-      final Fields fields = reader.fields();
-      if (fields == null) {
-        return null;
-      }
-      final Terms terms = fields.terms(field);
+      final Terms terms = reader.terms(field);
       if (terms == null) {
         return null;
       } else {
@@ -289,12 +284,7 @@ public class DocTermOrds implements Accountable {
     final int[] lastTerm = new int[maxDoc];    // last term we saw for this document
     final byte[][] bytes = new byte[maxDoc][]; // list of term numbers for the doc (delta encoded vInts)
 
-    final Fields fields = reader.fields();
-    if (fields == null) {
-      // No terms
-      return;
-    }
-    final Terms terms = fields.terms(field);
+    final Terms terms = reader.terms(field);
     if (terms == null) {
       // No terms
       return;
@@ -239,10 +239,6 @@ public class CommonTermsQuery extends Query {
     TermsEnum termsEnum = null;
     for (LeafReaderContext context : leaves) {
-      final Fields fields = context.reader().fields();
-      if (fields == null) {
-        // reader has no fields
-        continue;
-      }
       for (int i = 0; i < queryTerms.length; i++) {
         Term term = queryTerms[i];
         TermContext termContext = contextArray[i];
@@ -186,9 +186,6 @@ public final class TermsFilter extends Filter {
       BitDocIdSet.Builder builder = new BitDocIdSet.Builder(reader.maxDoc());
       final Fields fields = reader.fields();
       final BytesRef spare = new BytesRef(this.termsBytes);
-      if (fields == null) {
-        return builder.build();
-      }
       Terms terms = null;
       TermsEnum termsEnum = null;
      DocsEnum docs = null;
@@ -60,9 +60,7 @@ public class SumTotalTermFreqValueSource extends ValueSource {
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     long sumTotalTermFreq = 0;
     for (LeafReaderContext readerContext : searcher.getTopReaderContext().leaves()) {
-      Fields fields = readerContext.reader().fields();
-      if (fields == null) continue;
-      Terms terms = fields.terms(indexedField);
+      Terms terms = readerContext.reader().terms(indexedField);
       if (terms == null) continue;
       long v = terms.getSumTotalTermFreq();
       if (v == -1) {
@@ -43,8 +43,7 @@ public class AssertingLeafReader extends FilterLeafReader {
 
   @Override
   public Fields fields() throws IOException {
-    Fields fields = super.fields();
-    return fields == null ? null : new AssertingFields(fields);
+    return new AssertingFields(super.fields());
   }
 
   @Override
@@ -60,17 +60,14 @@ public class PerThreadPKLookup {
     int numSegs = 0;
     boolean hasDeletions = false;
     for(int i=0;i<leaves.size();i++) {
-      Fields fields = leaves.get(i).reader().fields();
-      if (fields != null) {
-        Terms terms = fields.terms(idFieldName);
-        if (terms != null) {
-          termsEnums[numSegs] = terms.iterator(null);
-          assert termsEnums[numSegs] != null;
-          docBases[numSegs] = leaves.get(i).docBase;
-          liveDocs[numSegs] = leaves.get(i).reader().getLiveDocs();
-          hasDeletions |= leaves.get(i).reader().hasDeletions();
-          numSegs++;
-        }
+      Terms terms = leaves.get(i).reader().terms(idFieldName);
+      if (terms != null) {
+        termsEnums[numSegs] = terms.iterator(null);
+        assert termsEnums[numSegs] != null;
+        docBases[numSegs] = leaves.get(i).docBase;
+        liveDocs[numSegs] = leaves.get(i).reader().getLiveDocs();
+        hasDeletions |= leaves.get(i).reader().hasDeletions();
+        numSegs++;
       }
     }
     this.numSegs = numSegs;
@@ -353,9 +353,6 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCase {
       if (s.getIndexReader().numDocs() > 0) {
         smokeTestSearcher(s);
         Fields fields = MultiFields.getFields(s.getIndexReader());
-        if (fields == null) {
-          continue;
-        }
         Terms terms = fields.terms("body");
         if (terms == null) {
           continue;
@@ -596,14 +596,8 @@ public class LukeRequestHandler extends RequestHandlerBase
 
     final CharsRefBuilder spare = new CharsRefBuilder();
 
-    Fields fields = MultiFields.getFields(req.getSearcher().getIndexReader());
-
-    if (fields == null) { // No indexed fields
-      return;
-    }
-
-    Terms terms = fields.terms(field);
-    if (terms == null) { // No terms in the field.
+    Terms terms = MultiFields.getTerms(req.getSearcher().getIndexReader(), field);
+    if (terms == null) { // field does not exist
       return;
     }
     TermsEnum termsEnum = terms.iterator(null);
@@ -124,9 +124,9 @@ public class TermsComponent extends SearchComponent {
       NamedList<Integer> fieldTerms = new NamedList<>();
       termsResult.add(field, fieldTerms);
 
-      Terms terms = lfields == null ? null : lfields.terms(field);
+      Terms terms = lfields.terms(field);
       if (terms == null) {
-        // no terms for this field
+        // field does not exist
         continue;
       }
 
@@ -780,9 +780,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrInfoMBean {
    * @return the first document number containing the term
    */
   public int getFirstMatch(Term t) throws IOException {
-    Fields fields = leafReader.fields();
-    if (fields == null) return -1;
-    Terms terms = fields.terms(t.field());
+    Terms terms = leafReader.terms(t.field());
     if (terms == null) return -1;
     BytesRef termBytes = t.bytes();
     final TermsEnum termsEnum = terms.iterator(null);
@@ -126,9 +126,7 @@ public class TestRTGBase extends SolrTestCaseJ4 {
 
 
   protected int getFirstMatch(IndexReader r, Term t) throws IOException {
-    Fields fields = MultiFields.getFields(r);
-    if (fields == null) return -1;
-    Terms terms = fields.terms(t.field());
+    Terms terms = MultiFields.getTerms(r, t.field());
     if (terms == null) return -1;
     BytesRef termBytes = t.bytes();
     final TermsEnum termsEnum = terms.iterator(null);