LUCENE-1739: when computing FieldInfos.hasProx(), disregard un-indexed fields

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@792918 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2009-07-10 12:31:17 +00:00
parent 9cbe5f4ff4
commit ce1231b37c
5 changed files with 52 additions and 21 deletions

View File

@@ -122,9 +122,12 @@ final class FieldInfos {
/** Returns true if any fields do not omitTermFreqAndPositions */ /** Returns true if any fields do not omitTermFreqAndPositions */
boolean hasProx() { boolean hasProx() {
final int numFields = byNumber.size(); final int numFields = byNumber.size();
for(int i=0;i<numFields;i++) for(int i=0;i<numFields;i++) {
if (!fieldInfo(i).omitTermFreqAndPositions) final FieldInfo fi = fieldInfo(i);
if (fi.isIndexed && !fi.omitTermFreqAndPositions) {
return true; return true;
}
}
return false; return false;
} }

View File

@@ -443,11 +443,7 @@ class SegmentReader extends IndexReader implements Cloneable {
instance.openDocStores(); instance.openDocStores();
} }
boolean anyProx = false; boolean anyProx = instance.fieldInfos.hasProx();
final int numFields = instance.fieldInfos.size();
for(int i=0;!anyProx && i<numFields;i++)
if (!instance.fieldInfos.fieldInfo(i).omitTermFreqAndPositions)
anyProx = true;
instance.tis = new TermInfosReader(cfsDir, instance.segment, instance.fieldInfos, readBufferSize); instance.tis = new TermInfosReader(cfsDir, instance.segment, instance.fieldInfos, readBufferSize);

View File

@@ -209,22 +209,24 @@ public class TestBackwardsCompatibility extends LuceneTestCase
!oldName.startsWith("20.") && !oldName.startsWith("20.") &&
!oldName.startsWith("21.") && !oldName.startsWith("21.") &&
!oldName.startsWith("22.")) { !oldName.startsWith("22.")) {
// Test on indices >= 2.3
assertEquals(5, fields.size());
Field f = (Field) d.getField("id");
assertEquals(""+i, f.stringValue());
f = (Field) d.getField("utf8"); if (d.getField("content3") == null) {
assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue()); assertEquals(5, fields.size());
Field f = (Field) d.getField("id");
assertEquals(""+i, f.stringValue());
f = (Field) d.getField("autf8"); f = (Field) d.getField("utf8");
assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue()); assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue());
f = (Field) d.getField("autf8");
assertEquals("Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue());
f = (Field) d.getField("content2"); f = (Field) d.getField("content2");
assertEquals("here is more content with aaa aaa aaa", f.stringValue()); assertEquals("here is more content with aaa aaa aaa", f.stringValue());
f = (Field) d.getField("fie\u2C77ld"); f = (Field) d.getField("fie\u2C77ld");
assertEquals("field with non-ascii name", f.stringValue()); assertEquals("field with non-ascii name", f.stringValue());
}
} }
} else } else
// Only ID 7 is deleted // Only ID 7 is deleted
@@ -257,10 +259,16 @@ public class TestBackwardsCompatibility extends LuceneTestCase
dir.close(); dir.close();
} }
/**
 * Compares the two-digit version prefix of an index directory name
 * (e.g. "24.cfs") against the version string {@code v}.
 * Negative when the directory's version is older than {@code v},
 * zero when equal, positive when newer.
 */
private int compare(String name, String v) {
  final int dirVersion = Integer.parseInt(name.substring(0, 2));
  final int otherVersion = Integer.parseInt(v);
  return dirVersion - otherVersion;
}
/* Open pre-lockless index, add docs, do a delete & /* Open pre-lockless index, add docs, do a delete &
* setNorm, and search */ * setNorm, and search */
public void changeIndexWithAdds(String dirName, boolean autoCommit) throws IOException { public void changeIndexWithAdds(String dirName, boolean autoCommit) throws IOException {
String origDirName = dirName;
dirName = fullDir(dirName); dirName = fullDir(dirName);
Directory dir = FSDirectory.open(new File(dirName)); Directory dir = FSDirectory.open(new File(dirName));
@@ -274,7 +282,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase
} }
// make sure writer sees right total -- writer seems not to know about deletes in .del? // make sure writer sees right total -- writer seems not to know about deletes in .del?
assertEquals("wrong doc count", 45, writer.docCount()); final int expected;
if (compare(origDirName, "24") < 0) {
expected = 45;
} else {
expected = 46;
}
assertEquals("wrong doc count", expected, writer.docCount());
writer.close(); writer.close();
// make sure searching sees right # hits // make sure searching sees right # hits
@@ -386,6 +400,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase
assertEquals("wrong doc count", 35, writer.docCount()); assertEquals("wrong doc count", 35, writer.docCount());
writer.close(); writer.close();
// open fresh writer so we get no prx file in the added segment
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(doCFS);
writer.setMaxBufferedDocs(10);
addNoProxDoc(writer);
writer.close();
// Delete one doc so we get a .del file: // Delete one doc so we get a .del file:
IndexReader reader = IndexReader.open(dir); IndexReader reader = IndexReader.open(dir);
Term searchTerm = new Term("id", "7"); Term searchTerm = new Term("id", "7");
@@ -492,6 +513,17 @@ public class TestBackwardsCompatibility extends LuceneTestCase
writer.addDocument(doc); writer.addDocument(doc);
} }
/**
 * Adds a document whose fields all have omitTf set, so the segment
 * created for it carries no proximity data: "content3" is indexed
 * (analyzed) but omits tf/positions, and "content4" is stored-only
 * (not indexed at all).
 */
private void addNoProxDoc(IndexWriter writer) throws IOException {
  Document doc = new Document();
  // Indexed field with term freq/positions omitted.
  Field field = new Field("content3", "aaa", Field.Store.YES, Field.Index.ANALYZED);
  field.setOmitTf(true);
  doc.add(field);
  // Stored-only field; not indexed, so it must never imply prox data.
  field = new Field("content4", "aaa", Field.Store.YES, Field.Index.NO);
  field.setOmitTf(true);
  doc.add(field);
  writer.addDocument(doc);
}
private void rmDir(String dir) throws IOException { private void rmDir(String dir) throws IOException {
File fileDir = new File(fullDir(dir)); File fileDir = new File(fullDir(dir));
if (fileDir.exists()) { if (fileDir.exists()) {