LUCENE-2858: Rewrite some tests

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene2858@1237322 13f79535-47bb-0310-9956-ffa450edef68
Uwe Schindler 2012-01-29 16:14:42 +00:00
parent f38af41141
commit 1963304dbe
14 changed files with 77 additions and 94 deletions
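
The rewrites below all apply the same mechanical migration: composite readers are opened via DirectoryReader.open() instead of IndexReader.open(), and per-segment APIs (docValues, normValues, termPositionsEnum, DEFAULT_TERMS_INDEX_DIVISOR) move to the atomic reader. A minimal sketch of that pattern, assuming the branch-era API names that appear in the diff (AtomicIndexReader, SlowCompositeReaderWrapper); the helper class itself is hypothetical and for illustration only:

import java.io.IOException;
import org.apache.lucene.index.AtomicIndexReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.store.Directory;

class ReaderMigrationSketch {
  static void demo(Directory dir) throws IOException {
    // Composite readers are now opened via DirectoryReader, not IndexReader:
    DirectoryReader reader = DirectoryReader.open(dir);
    // Per-segment methods live on AtomicIndexReader; wrapping the composite
    // reader emulates a single segment (a slow, test-only convenience):
    AtomicIndexReader slow = SlowCompositeReaderWrapper.wrap(reader);
    // Leaf contexts likewise expose the atomic reader through an accessor,
    // atomicReaderContext.reader(), rather than a public field.
    System.out.println(slow.maxDoc());
    reader.close();
  }
}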

View File: TestAddIndexes.java

@@ -1286,9 +1286,9 @@ public class TestAddIndexes extends LuceneTestCase {
     d2.close();
     w.forceMerge(1);
-    IndexReader r3 = w.getReader();
+    DirectoryReader r3 = w.getReader();
     w.close();
-    IndexReader sr = getOnlySegmentReader(r3);
+    AtomicIndexReader sr = getOnlySegmentReader(r3);
     assertEquals(2, sr.numDocs());
     DocValues docValues = sr.docValues("dv");
     assertNotNull(docValues);

View File: TestCodecs.java

@@ -260,7 +260,7 @@ public class TestCodecs extends LuceneTestCase {
     Codec codec = Codec.getDefault();
     final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, codec, clonedFieldInfos);
-    final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
+    final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
     final FieldsEnum fieldsEnum = reader.iterator();
     assertNotNull(fieldsEnum.next());
@@ -319,7 +319,7 @@ public class TestCodecs extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("TEST: now read postings");
     }
-    final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
+    final FieldsProducer terms = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR));
     final Verify[] threads = new Verify[NUM_TEST_THREADS-1];
     for(int i=0;i<NUM_TEST_THREADS-1;i++) {

View File: TestCustomNorms.java

@@ -79,7 +79,7 @@ public class TestCustomNorms extends LuceneTestCase {
     }
     writer.commit();
     writer.close();
-    IndexReader open = SlowCompositeReaderWrapper.wrap(IndexReader.open(dir));
+    AtomicIndexReader open = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
     DocValues normValues = open.normValues(floatTestField);
     assertNotNull(normValues);
     Source source = normValues.getSource();

View File: TestDeletionPolicy.java

@@ -74,7 +74,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     }
     public void onCommit(List<? extends IndexCommit> commits) throws IOException {
       IndexCommit lastCommit = commits.get(commits.size()-1);
-      IndexReader r = IndexReader.open(dir);
+      DirectoryReader r = DirectoryReader.open(dir);
       assertEquals("lastCommit.segmentCount()=" + lastCommit.getSegmentCount() + " vs IndexReader.segmentCount=" + r.getSequentialSubReaders().length, r.getSequentialSubReaders().length, lastCommit.getSegmentCount());
       r.close();
       verifyCommitOrder(commits);
@@ -320,7 +320,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     final boolean needsMerging;
     {
-      IndexReader r = IndexReader.open(dir);
+      DirectoryReader r = DirectoryReader.open(dir);
       needsMerging = r.getSequentialSubReaders().length != 1;
       r.close();
     }
@@ -436,7 +436,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     // Should undo our rollback:
     writer.rollback();
-    IndexReader r = IndexReader.open(dir);
+    DirectoryReader r = DirectoryReader.open(dir);
     // Still merged, still 11 docs
     assertEquals(1, r.getSequentialSubReaders().length);
     assertEquals(11, r.numDocs());
@@ -451,7 +451,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     // Now 8 because we made another commit
     assertEquals(7, DirectoryReader.listCommits(dir).size());
-    r = IndexReader.open(dir);
+    r = DirectoryReader.open(dir);
     // Not fully merged because we rolled it back, and now only
     // 10 docs
     assertTrue(r.getSequentialSubReaders().length > 1);

View File: TestDirectoryReader.java

@@ -91,26 +91,14 @@ public class TestDirectoryReader extends LuceneTestCase {
   }
   public void testIsCurrent() throws IOException {
-    Directory ramDir1=newDirectory();
-    addDoc(random, ramDir1, "test foo", true);
-    Directory ramDir2=newDirectory();
-    addDoc(random, ramDir2, "test blah", true);
-    IndexReader[] readers = new IndexReader[]{IndexReader.open(ramDir1), IndexReader.open(ramDir2)};
-    MultiReader mr = new MultiReader(readers);
-    assertTrue(mr.isCurrent()); // just opened, must be current
-    addDoc(random, ramDir1, "more text", false);
-    assertFalse(mr.isCurrent()); // has been modified, not current anymore
-    addDoc(random, ramDir2, "even more text", false);
-    assertFalse(mr.isCurrent()); // has been modified even more, not current anymore
-    try {
-      mr.getVersion();
-      fail();
-    } catch (UnsupportedOperationException e) {
-      // expected exception
-    }
-    mr.close();
-    ramDir1.close();
-    ramDir2.close();
+    Directory ramDir=newDirectory();
+    addDoc(random, ramDir, "test foo", true);
+    DirectoryReader reader = DirectoryReader.open(ramDir);
+    assertTrue(reader.isCurrent()); // just opened, must be current
+    addDoc(random, ramDir, "more text", false);
+    assertFalse(reader.isCurrent()); // has been modified, not current anymore
+    reader.close();
+    ramDir.close();
   }
   public void testMultiTermDocs() throws IOException {

View File: TestDoc.java

@@ -190,8 +190,8 @@ public class TestDoc extends LuceneTestCase {
   private SegmentInfo merge(Directory dir, SegmentInfo si1, SegmentInfo si2, String merged, boolean useCompoundFile)
   throws Exception {
     IOContext context = newIOContext(random);
-    SegmentReader r1 = new SegmentReader(si1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
-    SegmentReader r2 = new SegmentReader(si2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
+    SegmentReader r1 = new SegmentReader(si1, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
+    SegmentReader r2 = new SegmentReader(si2, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
     final Codec codec = Codec.getDefault();
     SegmentMerger merger = new SegmentMerger(InfoStream.getDefault(), si1.dir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, merged, MergeState.CheckAbort.NONE, null, new FieldInfos(new FieldInfos.FieldNumberBiMap()), codec, context);
@@ -218,7 +218,7 @@ public class TestDoc extends LuceneTestCase {
   private void printSegment(PrintWriter out, SegmentInfo si)
   throws Exception {
-    SegmentReader reader = new SegmentReader(si, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+    SegmentReader reader = new SegmentReader(si, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
     for (int i = 0; i < reader.numDocs(); i++)
       out.println(reader.document(i));

View File: TestDocTermOrds.java

@@ -149,7 +149,7 @@ public class TestDocTermOrds extends LuceneTestCase {
       w.addDocument(doc);
     }
-    final IndexReader r = w.getReader();
+    final DirectoryReader r = w.getReader();
     w.close();
     if (VERBOSE) {
@@ -160,7 +160,7 @@ public class TestDocTermOrds extends LuceneTestCase {
       if (VERBOSE) {
        System.out.println("\nTEST: sub=" + subR);
       }
-      verify(subR, idToOrds, termsArray, null);
+      verify((AtomicIndexReader) subR, idToOrds, termsArray, null);
     }
     // Also test top-level reader: its enum does not support
@@ -168,9 +168,10 @@ public class TestDocTermOrds extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("TEST: top reader");
     }
-    verify(SlowCompositeReaderWrapper.wrap(r), idToOrds, termsArray, null);
+    AtomicIndexReader slowR = SlowCompositeReaderWrapper.wrap(r);
+    verify(slowR, idToOrds, termsArray, null);
-    FieldCache.DEFAULT.purge(r);
+    FieldCache.DEFAULT.purge(slowR);
     r.close();
     dir.close();
@@ -245,13 +246,14 @@ public class TestDocTermOrds extends LuceneTestCase {
       w.addDocument(doc);
     }
-    final IndexReader r = w.getReader();
+    final DirectoryReader r = w.getReader();
     w.close();
     if (VERBOSE) {
       System.out.println("TEST: reader=" + r);
     }
+    AtomicIndexReader slowR = SlowCompositeReaderWrapper.wrap(r);
     for(String prefix : prefixesArray) {
       final BytesRef prefixRef = prefix == null ? null : new BytesRef(prefix);
@@ -277,7 +279,7 @@ public class TestDocTermOrds extends LuceneTestCase {
       if (VERBOSE) {
         System.out.println("\nTEST: sub=" + subR);
       }
-      verify(subR, idToOrdsPrefix, termsArray, prefixRef);
+      verify((AtomicIndexReader) subR, idToOrdsPrefix, termsArray, prefixRef);
     }
     // Also test top-level reader: its enum does not support
@@ -285,16 +287,16 @@ public class TestDocTermOrds extends LuceneTestCase {
       if (VERBOSE) {
         System.out.println("TEST: top reader");
       }
-      verify(SlowCompositeReaderWrapper.wrap(r), idToOrdsPrefix, termsArray, prefixRef);
+      verify(slowR, idToOrdsPrefix, termsArray, prefixRef);
     }
-    FieldCache.DEFAULT.purge(r);
+    FieldCache.DEFAULT.purge(slowR);
     r.close();
     dir.close();
   }
-  private void verify(IndexReader r, int[][] idToOrds, BytesRef[] termsArray, BytesRef prefixRef) throws Exception {
+  private void verify(AtomicIndexReader r, int[][] idToOrds, BytesRef[] termsArray, BytesRef prefixRef) throws Exception {
     final DocTermOrds dto = new DocTermOrds(r,
                                             "field",

View File: TestDocValuesIndexing.java

@@ -82,7 +82,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     writer.close(true);
-    IndexReader reader = IndexReader.open(dir, 1);
+    DirectoryReader reader = DirectoryReader.open(dir, 1);
     assertEquals(1, reader.getSequentialSubReaders().length);
     IndexSearcher searcher = new IndexSearcher(reader);
@@ -694,9 +694,9 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     doc.add(f);
     w.addDocument(doc);
     w.forceMerge(1);
-    IndexReader r = w.getReader();
+    DirectoryReader r = w.getReader();
     w.close();
-    assertEquals(17, r.getSequentialSubReaders()[0].docValues("field").load().getInt(0));
+    assertEquals(17, ((AtomicIndexReader) r.getSequentialSubReaders()[0]).docValues("field").load().getInt(0));
     r.close();
     d.close();
   }
@@ -721,9 +721,9 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     doc.add(f);
     w.addDocument(doc);
     w.forceMerge(1);
-    IndexReader r = w.getReader();
+    DirectoryReader r = w.getReader();
     w.close();
-    assertEquals(17, r.getSequentialSubReaders()[0].docValues("field").load().getInt(0));
+    assertEquals(17, getOnlySegmentReader(r).docValues("field").load().getInt(0));
     r.close();
     d.close();
   }
@@ -795,11 +795,11 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       int ord = asSortedSource.getByValue(expected, actual);
       assertEquals(i, ord);
     }
-    reader = SlowCompositeReaderWrapper.wrap(reader);
+    AtomicIndexReader slowR = SlowCompositeReaderWrapper.wrap(reader);
     Set<Entry<String, String>> entrySet = docToString.entrySet();
     for (Entry<String, String> entry : entrySet) {
-      int docId = docId(reader, new Term("id", entry.getKey()));
+      int docId = docId(slowR, new Term("id", entry.getKey()));
       expected.copyChars(entry.getValue());
       assertEquals(expected, asSortedSource.getBytes(docId, actual));
     }
@@ -810,7 +810,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     }
   }
-  public int docId(IndexReader reader, Term term) throws IOException {
+  public int docId(AtomicIndexReader reader, Term term) throws IOException {
     int docFreq = reader.docFreq(term);
     assertEquals(1, docFreq);
     DocsEnum termDocsEnum = reader.termDocsEnum(null, term.field, term.bytes, false);

View File: TestDocsAndPositions.java

@@ -70,12 +70,12 @@ public class TestDocsAndPositions extends LuceneTestCase {
     AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
     for (AtomicReaderContext atomicReaderContext : leaves) {
       DocsAndPositionsEnum docsAndPosEnum = getDocsAndPositions(
-          atomicReaderContext.reader, bytes, null);
+          atomicReaderContext.reader(), bytes, null);
       assertNotNull(docsAndPosEnum);
-      if (atomicReaderContext.reader.maxDoc() == 0) {
+      if (atomicReaderContext.reader().maxDoc() == 0) {
         continue;
       }
-      final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader.maxDoc()));
+      final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader().maxDoc()));
       do {
         String msg = "Advanced to: " + advance + " current doc: "
             + docsAndPosEnum.docID(); // TODO: + " usePayloads: " + usePayload;
@@ -94,7 +94,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
     directory.close();
   }
-  public DocsAndPositionsEnum getDocsAndPositions(IndexReader reader,
+  public DocsAndPositionsEnum getDocsAndPositions(AtomicIndexReader reader,
       BytesRef bytes, Bits liveDocs) throws IOException {
     return reader.termPositionsEnum(null, fieldName, bytes, false);
   }
@@ -146,10 +146,10 @@ public class TestDocsAndPositions extends LuceneTestCase {
     AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
     for (AtomicReaderContext atomicReaderContext : leaves) {
       DocsAndPositionsEnum docsAndPosEnum = getDocsAndPositions(
-          atomicReaderContext.reader, bytes, null);
+          atomicReaderContext.reader(), bytes, null);
       assertNotNull(docsAndPosEnum);
       int initDoc = 0;
-      int maxDoc = atomicReaderContext.reader.maxDoc();
+      int maxDoc = atomicReaderContext.reader().maxDoc();
       // initially advance or do next doc
       if (random.nextBoolean()) {
         initDoc = docsAndPosEnum.nextDoc();
@@ -301,11 +301,11 @@ public class TestDocsAndPositions extends LuceneTestCase {
     AtomicReaderContext[] leaves = ReaderUtil.leaves(topReaderContext);
     for (AtomicReaderContext atomicReaderContext : leaves) {
       DocsAndPositionsEnum docsAndPosEnum = getDocsAndPositions(
-          atomicReaderContext.reader, bytes, null);
+          atomicReaderContext.reader(), bytes, null);
       assertNotNull(docsAndPosEnum);
       int initDoc = 0;
-      int maxDoc = atomicReaderContext.reader.maxDoc();
+      int maxDoc = atomicReaderContext.reader().maxDoc();
       // initially advance or do next doc
       if (random.nextBoolean()) {
         initDoc = docsAndPosEnum.nextDoc();
@@ -331,8 +331,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
     Document doc = new Document();
     doc.add(newField("foo", "bar", StringField.TYPE_UNSTORED));
     writer.addDocument(doc);
-    IndexReader reader = writer.getReader();
-    IndexReader r = getOnlySegmentReader(reader);
+    DirectoryReader reader = writer.getReader();
+    AtomicIndexReader r = getOnlySegmentReader(reader);
     DocsEnum disi = _TestUtil.docs(random, r, "foo", new BytesRef("bar"), null, null, false);
     int docid = disi.docID();
     assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);
@@ -356,8 +356,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
     Document doc = new Document();
     doc.add(newField("foo", "bar", TextField.TYPE_UNSTORED));
     writer.addDocument(doc);
-    IndexReader reader = writer.getReader();
-    IndexReader r = getOnlySegmentReader(reader);
+    DirectoryReader reader = writer.getReader();
+    AtomicIndexReader r = getOnlySegmentReader(reader);
     DocsAndPositionsEnum disi = r.termPositionsEnum(null, "foo", new BytesRef("bar"), false);
     int docid = disi.docID();
     assertTrue(docid == -1 || docid == DocIdSetIterator.NO_MORE_DOCS);

View File: TestDocumentWriter.java

@@ -64,7 +64,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     SegmentInfo info = writer.newestSegment();
     writer.close();
     //After adding the document, we should be able to read it back in
-    SegmentReader reader = new SegmentReader(info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
     assertTrue(reader != null);
     Document doc = reader.document(0);
     assertTrue(doc != null);
@@ -125,7 +125,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
     DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader),
         "repeated", new BytesRef("repeated"), false);
@@ -197,7 +197,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
     DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, reader.getLiveDocs(), "f1", new BytesRef("a"), false);
     assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
@@ -241,7 +241,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     writer.commit();
     SegmentInfo info = writer.newestSegment();
     writer.close();
-    SegmentReader reader = new SegmentReader(info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+    SegmentReader reader = new SegmentReader(info, DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
     DocsAndPositionsEnum termPositions = reader.termPositionsEnum(reader.getLiveDocs(), "preanalyzed", new BytesRef("term1"), false);
     assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);

View File: TestDuelingCodecs.java

@@ -151,10 +151,6 @@ public class TestDuelingCodecs extends LuceneTestCase {
     assertEquals(info, leftReader.numDocs(), rightReader.numDocs());
     assertEquals(info, leftReader.numDeletedDocs(), rightReader.numDeletedDocs());
     assertEquals(info, leftReader.hasDeletions(), rightReader.hasDeletions());
-    if (leftReader.getUniqueTermCount() != -1 && rightReader.getUniqueTermCount() != -1) {
-      assertEquals(info, leftReader.getUniqueTermCount(), rightReader.getUniqueTermCount());
-    }
   }
 /**
@@ -462,11 +458,13 @@ public class TestDuelingCodecs extends LuceneTestCase {
     FieldsEnum fieldsEnum = leftFields.iterator();
     String field;
     while ((field = fieldsEnum.next()) != null) {
-      assertEquals(info, leftReader.hasNorms(field), rightReader.hasNorms(field));
-      if (leftReader.hasNorms(field)) {
-        DocValues leftNorms = MultiDocValues.getNormDocValues(leftReader, field);
-        DocValues rightNorms = MultiDocValues.getNormDocValues(rightReader, field);
+      DocValues leftNorms = MultiDocValues.getNormDocValues(leftReader, field);
+      DocValues rightNorms = MultiDocValues.getNormDocValues(rightReader, field);
+      if (leftNorms != null && rightNorms != null) {
         assertDocValues(leftNorms, rightNorms);
+      } else {
+        assertNull(leftNorms);
+        assertNull(rightNorms);
+      }
     }
   }
@@ -519,7 +517,7 @@ public class TestDuelingCodecs extends LuceneTestCase {
   private static Set<String> getDVFields(IndexReader reader) {
     Set<String> fields = new HashSet<String>();
-    for(FieldInfo fi : ReaderUtil.getMergedFieldInfos(reader)) {
+    for(FieldInfo fi : MultiFields.getMergedFieldInfos(reader)) {
       if (fi.hasDocValues()) {
         fields.add(fi.name);
       }
@@ -539,7 +537,12 @@ public class TestDuelingCodecs extends LuceneTestCase {
     for (String field : leftValues) {
       DocValues leftDocValues = MultiDocValues.getDocValues(leftReader, field);
       DocValues rightDocValues = MultiDocValues.getDocValues(rightReader, field);
-      assertDocValues(leftDocValues, rightDocValues);
+      if (leftDocValues != null && rightDocValues != null) {
+        assertDocValues(leftDocValues, rightDocValues);
+      } else {
+        assertNull(leftDocValues);
+        assertNull(rightDocValues);
+      }
     }
   }

View File: TestFieldsReader.java

@@ -276,13 +276,13 @@ public class TestFieldsReader extends LuceneTestCase {
       doc.add(new NumericField("id", id, ft));
       w.addDocument(doc);
     }
-    final IndexReader r = w.getReader();
+    final DirectoryReader r = w.getReader();
     w.close();
     assertEquals(numDocs, r.numDocs());
     for(IndexReader sub : r.getSequentialSubReaders()) {
-      final int[] ids = FieldCache.DEFAULT.getInts(sub, "id", false);
+      final int[] ids = FieldCache.DEFAULT.getInts((AtomicIndexReader) sub, "id", false);
       for(int docID=0;docID<sub.numDocs();docID++) {
         final Document doc = sub.document(docID);
         final Field f = (Field) doc.getField("nf");

View File: TestFilterIndexReader.java

@@ -114,7 +114,7 @@ public class TestFilterIndexReader extends LuceneTestCase {
     }
   }
-  public TestReader(IndexReader reader) {
+  public TestReader(IndexReader reader) throws IOException {
     super(SlowCompositeReaderWrapper.wrap(reader));
   }
@@ -122,11 +122,6 @@ public class TestFilterIndexReader extends LuceneTestCase {
   public Fields fields() throws IOException {
     return new TestFields(super.fields());
   }
-  @Override
-  public FieldInfos getFieldInfos() {
-    return ReaderUtil.getMergedFieldInfos(in);
-  }
 }
 /**
@@ -183,23 +178,17 @@ public class TestFilterIndexReader extends LuceneTestCase {
   }
   public void testOverrideMethods() throws Exception {
-    HashSet<String> methodsThatShouldNotBeOverridden = new HashSet<String>();
-    methodsThatShouldNotBeOverridden.add("doOpenIfChanged");
-    methodsThatShouldNotBeOverridden.add("clone");
     boolean fail = false;
     for (Method m : FilterIndexReader.class.getMethods()) {
       int mods = m.getModifiers();
-      if (Modifier.isStatic(mods) || Modifier.isFinal(mods)) {
+      if (Modifier.isStatic(mods) || Modifier.isFinal(mods) || m.isSynthetic()) {
         continue;
       }
-      Class< ? > declaringClass = m.getDeclaringClass();
+      Class<?> declaringClass = m.getDeclaringClass();
       String name = m.getName();
-      if (declaringClass != FilterIndexReader.class && declaringClass != Object.class && !methodsThatShouldNotBeOverridden.contains(name)) {
+      if (declaringClass != FilterIndexReader.class && declaringClass != Object.class) {
         System.err.println("method is not overridden by FilterIndexReader: " + name);
         fail = true;
-      } else if (declaringClass == FilterIndexReader.class && methodsThatShouldNotBeOverridden.contains(name)) {
-        System.err.println("method should not be overridden by FilterIndexReader: " + name);
-        fail = true;
       }
     }
     assertFalse("FilterIndexReader overrides (or not) some problematic methods; see log above", fail);

View File: TestFlex.java

@@ -69,8 +69,9 @@ public class TestFlex extends LuceneTestCase {
     Document doc = new Document();
     doc.add(newField("f", "a b c", TextField.TYPE_UNSTORED));
     w.addDocument(doc);
-    IndexReader r = w.getReader();
-    TermsEnum terms = r.getSequentialSubReaders()[0].fields().terms("f").iterator(null);
+    w.forceMerge(1);
+    DirectoryReader r = w.getReader();
+    TermsEnum terms = getOnlySegmentReader(r).fields().terms("f").iterator(null);
     assertTrue(terms.next() != null);
     try {
       assertEquals(0, terms.ord());