mirror of https://github.com/apache/lucene.git
LUCENE-1739: when computing FieldInfos.hasProx(), disregard un-indexed fields
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@792918 13f79535-47bb-0310-9956-ffa450edef68
parent 9cbe5f4ff4
commit ce1231b37c
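The gist of the fix: a field that is stored but never indexed can be left with omitTermFreqAndPositions == false in FieldInfos, so the old hasProx() loop could claim a segment has positions even though no .prx file was ever written, and the reader would then go looking for one. Below is a minimal, self-contained sketch of the before/after check; FieldInfoSketch and HasProxSketch are illustrative stand-ins, not the real Lucene classes.

// FieldInfoSketch stands in for org.apache.lucene.index.FieldInfo,
// keeping only the two flags this fix cares about.
class FieldInfoSketch {
  final boolean isIndexed;
  final boolean omitTermFreqAndPositions;

  FieldInfoSketch(boolean isIndexed, boolean omitTermFreqAndPositions) {
    this.isIndexed = isIndexed;
    this.omitTermFreqAndPositions = omitTermFreqAndPositions;
  }
}

public class HasProxSketch {
  // Old logic: any field with omitTermFreqAndPositions == false counted,
  // even if the field was never indexed.
  static boolean hasProxOld(FieldInfoSketch[] fields) {
    for (FieldInfoSketch fi : fields) {
      if (!fi.omitTermFreqAndPositions) {
        return true;
      }
    }
    return false;
  }

  // Fixed logic: only indexed fields that keep term freqs/positions count.
  static boolean hasProxFixed(FieldInfoSketch[] fields) {
    for (FieldInfoSketch fi : fields) {
      if (fi.isIndexed && !fi.omitTermFreqAndPositions) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    FieldInfoSketch storedOnly = new FieldInfoSketch(false, false); // stored-only, never indexed
    FieldInfoSketch noProx = new FieldInfoSketch(true, true);       // indexed, omits tf/positions
    FieldInfoSketch[] fields = { storedOnly, noProx };

    System.out.println(hasProxOld(fields));   // true: reader would expect a .prx file that does not exist
    System.out.println(hasProxFixed(fields)); // false: the segment really has no positions
  }
}

The first hunk below adds exactly this isIndexed check; the SegmentReader hunk then reuses fieldInfos.hasProx() instead of re-running its own loop, and the TestBackwardsCompatibility changes add a no-prox, partly un-indexed document to the back-compat indexes to cover the case.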
@@ -122,9 +122,12 @@ final class FieldInfos {
 
   /** Returns true if any fields do not omitTermFreqAndPositions */
   boolean hasProx() {
     final int numFields = byNumber.size();
-    for(int i=0;i<numFields;i++)
-      if (!fieldInfo(i).omitTermFreqAndPositions)
+    for(int i=0;i<numFields;i++) {
+      final FieldInfo fi = fieldInfo(i);
+      if (fi.isIndexed && !fi.omitTermFreqAndPositions) {
         return true;
+      }
+    }
     return false;
   }
@@ -443,11 +443,7 @@ class SegmentReader extends IndexReader implements Cloneable {
         instance.openDocStores();
       }
 
-      boolean anyProx = false;
-      final int numFields = instance.fieldInfos.size();
-      for(int i=0;!anyProx && i<numFields;i++)
-        if (!instance.fieldInfos.fieldInfo(i).omitTermFreqAndPositions)
-          anyProx = true;
+      boolean anyProx = instance.fieldInfos.hasProx();
 
       instance.tis = new TermInfosReader(cfsDir, instance.segment, instance.fieldInfos, readBufferSize);
 
@@ -209,22 +209,24 @@ public class TestBackwardsCompatibility extends LuceneTestCase
           !oldName.startsWith("20.") &&
           !oldName.startsWith("21.") &&
           !oldName.startsWith("22.")) {
         // Test on indices >= 2.3
-        assertEquals(5, fields.size());
-        Field f = (Field) d.getField("id");
-        assertEquals(""+i, f.stringValue());
+        if (d.getField("content3") == null) {
+          assertEquals(5, fields.size());
+          Field f = (Field) d.getField("id");
+          assertEquals(""+i, f.stringValue());
 
-        f = (Field) d.getField("utf8");
-        assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue());
+          f = (Field) d.getField("utf8");
+          assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue());
 
-        f = (Field) d.getField("autf8");
-        assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue());
+          f = (Field) d.getField("autf8");
+          assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue());
 
-        f = (Field) d.getField("content2");
-        assertEquals("here is more content with aaa aaa aaa", f.stringValue());
+          f = (Field) d.getField("content2");
+          assertEquals("here is more content with aaa aaa aaa", f.stringValue());
 
-        f = (Field) d.getField("fie\u2C77ld");
-        assertEquals("field with non-ascii name", f.stringValue());
+          f = (Field) d.getField("fie\u2C77ld");
+          assertEquals("field with non-ascii name", f.stringValue());
+        }
       }
     } else
       // Only ID 7 is deleted
@@ -257,10 +259,16 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     dir.close();
   }
 
+  private int compare(String name, String v) {
+    int v0 = Integer.parseInt(name.substring(0, 2));
+    int v1 = Integer.parseInt(v);
+    return v0 - v1;
+  }
+
   /* Open pre-lockless index, add docs, do a delete &
    * setNorm, and search */
   public void changeIndexWithAdds(String dirName, boolean autoCommit) throws IOException {
     String origDirName = dirName;
     dirName = fullDir(dirName);
 
     Directory dir = FSDirectory.open(new File(dirName));
@@ -274,7 +282,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     }
 
     // make sure writer sees right total -- writer seems not to know about deletes in .del?
-    assertEquals("wrong doc count", 45, writer.docCount());
+    final int expected;
+    if (compare(origDirName, "24") < 0) {
+      expected = 45;
+    } else {
+      expected = 46;
+    }
+    assertEquals("wrong doc count", expected, writer.docCount());
     writer.close();
 
     // make sure searching sees right # hits
@@ -386,6 +400,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     assertEquals("wrong doc count", 35, writer.docCount());
     writer.close();
 
+    // open fresh writer so we get no prx file in the added segment
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(doCFS);
+    writer.setMaxBufferedDocs(10);
+    addNoProxDoc(writer);
+    writer.close();
+
     // Delete one doc so we get a .del file:
     IndexReader reader = IndexReader.open(dir);
     Term searchTerm = new Term("id", "7");
@@ -492,6 +513,17 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     writer.addDocument(doc);
   }
 
+  private void addNoProxDoc(IndexWriter writer) throws IOException {
+    Document doc = new Document();
+    Field f = new Field("content3", "aaa", Field.Store.YES, Field.Index.ANALYZED);
+    f.setOmitTf(true);
+    doc.add(f);
+    f = new Field("content4", "aaa", Field.Store.YES, Field.Index.NO);
+    f.setOmitTf(true);
+    doc.add(f);
+    writer.addDocument(doc);
+  }
+
   private void rmDir(String dir) throws IOException {
     File fileDir = new File(fullDir(dir));
     if (fileDir.exists()) {
Binary file not shown.
Binary file not shown.