LUCENE-5305: wrap reader as slow composite reader, until we fix DocumentExpressionDictionary to accept composite readers

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1535455 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2013-10-24 17:55:12 +00:00
parent d49ba41f54
commit b6eab2b39a
1 changed file with 13 additions and 3 deletions

@@ -36,6 +36,7 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.spell.Dictionary;
@@ -83,7 +84,10 @@ public class DocumentExpressionDictionaryTest extends LuceneTestCase {
     }
     writer.commit();
     writer.close();
-    IndexReader ir = DirectoryReader.open(dir);
+    // TODO: once we fix DocumentExpressionDictionary to
+    // accept readers with more than one segment, we can
+    // remove this wrapping:
+    IndexReader ir = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
     Set<SortField> sortFields = new HashSet<SortField>();
     sortFields.add(new SortField(WEIGHT_FIELD_NAME_1, SortField.Type.LONG));
     sortFields.add(new SortField(WEIGHT_FIELD_NAME_2, SortField.Type.LONG));
@@ -117,7 +121,10 @@ public class DocumentExpressionDictionaryTest extends LuceneTestCase {
     }
     writer.commit();
     writer.close();
-    IndexReader ir = DirectoryReader.open(dir);
+    // TODO: once we fix DocumentExpressionDictionary to
+    // accept readers with more than one segment, we can
+    // remove this wrapping:
+    IndexReader ir = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
     Set<SortField> sortFields = new HashSet<SortField>();
     sortFields.add(new SortField(WEIGHT_FIELD_NAME_1, SortField.Type.LONG));
     sortFields.add(new SortField(WEIGHT_FIELD_NAME_2, SortField.Type.LONG));
@@ -171,7 +178,10 @@ public class DocumentExpressionDictionaryTest extends LuceneTestCase {
       assertTrue(null!=docs.remove(termToDel));
     }
-    IndexReader ir = DirectoryReader.open(dir);
+    // TODO: once we fix DocumentExpressionDictionary to
+    // accept readers with more than one segment, we can
+    // remove this wrapping:
+    IndexReader ir = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
     assertEquals(ir.numDocs(), docs.size());
     Set<SortField> sortFields = new HashSet<SortField>();
     sortFields.add(new SortField(WEIGHT_FIELD_NAME_1, SortField.Type.LONG));
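For reference, a minimal sketch (not part of this commit) of the workaround the test now uses: open the index normally, then flatten the composite reader with SlowCompositeReaderWrapper before handing it to the dictionary. The helper class and method names below are illustrative only.

import java.io.IOException;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.store.Directory;

// Illustrative helper, not part of the commit.
public class SlowWrapExample {

  // Opens a reader over the given index and flattens it into a single
  // atomic view, mirroring the wrapping added in the test above.
  static AtomicReader openWrapped(Directory dir) throws IOException {
    // DirectoryReader.open returns a composite reader with one leaf per segment.
    DirectoryReader composite = DirectoryReader.open(dir);
    // SlowCompositeReaderWrapper merges those leaves into one atomic view;
    // this is only needed until DocumentExpressionDictionary accepts
    // composite readers directly (see the TODOs in the diff).
    return SlowCompositeReaderWrapper.wrap(composite);
  }
}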