LUCENE-2858: Add a workaround for the broken PayloadProcessorProvider API so that it behaves as it did before our change (IR.directory() threw UOE for all non-directory-based readers). I will open an issue to make PayloadProcessorProvider take an AtomicReader instance as the selection criterion for the processor.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene2858@1237692 13f79535-47bb-0310-9956-ffa450edef68
Uwe Schindler 2012-01-30 14:56:02 +00:00
parent 4091d801c1
commit 9a6e69cf8d
3 changed files with 8 additions and 15 deletions
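
The commit message proposes moving processor selection from the Directory to the reader itself. A minimal sketch of what such a per-reader provider could look like follows; every name in it (PerReaderPayloadProcessorProvider, ReaderPayloadProcessor, getProcessor) is a hypothetical illustration rather than an API of this branch, and the AtomicReader type name follows the commit message (parts of the branch still call it AtomicIndexReader).

// Hypothetical sketch only: all names are illustrations, not Lucene API.
import org.apache.lucene.index.AtomicReader; // commit-message name; may still be AtomicIndexReader on this branch

public abstract class PerReaderPayloadProcessorProvider {

  /** Stand-in for whatever per-reader processor type the follow-up issue would define. */
  public static abstract class ReaderPayloadProcessor {
  }

  /**
   * Selects a processor using the incoming reader itself as the criterion
   * (its concrete type, its directory if it is a SegmentReader, field infos, ...).
   * Returning null would mean: leave this reader's payloads untouched.
   */
  public abstract ReaderPayloadProcessor getProcessor(AtomicReader reader);
}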

SegmentMerger.java

@@ -323,9 +323,12 @@ final class SegmentMerger {
docBase += docCount;
if (mergeState.payloadProcessorProvider != null) {
-// nocommit: this does not work anymore as SR/AtomicIndexReader does not know the directory anymore:
-// mergeState.dirPayloadProcessor[i] = mergeState.payloadProcessorProvider.getDirProcessor(reader.reader.directory());
-throw new UnsupportedOperationException("PayloadProcessorProvider is not supported at the moment :(");
+// TODO: the PayloadProcessorProvider should take AtomicReader as parameter
+// and find out by itself if it can provide a processor:
+if (!(reader.reader instanceof SegmentReader))
+  throw new UnsupportedOperationException("Payload processing currently requires exclusively SegmentReaders to be merged.");
+final Directory dir = ((SegmentReader) reader.reader).directory();
+mergeState.dirPayloadProcessor[i] = mergeState.payloadProcessorProvider.getDirProcessor(dir);
}
i++;

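For context, a minimal provider sketch that works against the restored per-directory path is shown below. Only getDirProcessor(Directory) is confirmed by the hunk above; the DirPayloadProcessor inner type and the convention that returning null skips processing are assumptions carried over from the pre-existing PayloadProcessorProvider API and may differ on this branch.

// Sketch under the assumptions stated above; not a definitive implementation.
import org.apache.lucene.index.PayloadProcessorProvider;
import org.apache.lucene.store.Directory;

class SingleSourcePayloadProcessorProvider extends PayloadProcessorProvider {
  private final Directory source;              // directory whose payloads should be rewritten
  private final DirPayloadProcessor processor; // assumed inner type of PayloadProcessorProvider

  SingleSourcePayloadProcessorProvider(Directory source, DirPayloadProcessor processor) {
    this.source = source;
    this.processor = processor;
  }

  @Override
  public DirPayloadProcessor getDirProcessor(Directory dir) {
    // With the workaround above, dir is always the directory of a SegmentReader
    // participating in the merge; any non-SegmentReader aborts the merge before
    // this method is consulted.
    return dir == source ? processor : null;
  }
}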
TestPayloadProcessorProvider.java

@@ -37,7 +37,6 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
-import org.junit.Ignore;
public class TestPayloadProcessorProvider extends LuceneTestCase {
@@ -215,8 +214,6 @@ public class TestPayloadProcessorProvider extends LuceneTestCase {
dir.close();
}
-@Ignore("This test does not work, as PerDirPayloadProcessor is currently broken (see nocommit in SegmentMerger): "+
-"SegmentReader/AtomicReader does not know its directory. This is broken, it should be a PayLoadProcessorProvider per AtomicReader!")
@Test
public void testAddIndexes() throws Exception {
// addIndexes - single commit in each
@@ -226,8 +223,6 @@ public class TestPayloadProcessorProvider extends LuceneTestCase {
doTest(random, true, 0, true);
}
-@Ignore("This test does not work, as PerDirPayloadProcessor is currently broken (see nocommit in SegmentMerger): "+
-"SegmentReader/AtomicReader does not know its directory. This is broken, it should be a PayLoadProcessorProvider per AtomicReader!")
@Test
public void testAddIndexesIntoExisting() throws Exception {
// addIndexes - single commit in each
@@ -237,8 +232,6 @@ public class TestPayloadProcessorProvider extends LuceneTestCase {
doTest(random, false, NUM_DOCS, true);
}
-@Ignore("This test does not work, as PerDirPayloadProcessor is currently broken (see nocommit in SegmentMerger): "+
-"SegmentReader/AtomicReader does not know its directory. This is broken, it should be a PayLoadProcessorProvider per AtomicReader!")
@Test
public void testRegularMerges() throws Exception {
Directory dir = newDirectory();

FacetsPayloadProcessorProviderTest.java

@@ -7,13 +7,12 @@ import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
-import org.junit.Ignore;
import org.junit.Test;
import org.apache.lucene.util.LuceneTestCase;
@@ -48,8 +47,6 @@ public class FacetsPayloadProcessorProviderTest extends LuceneTestCase {
private static final int NUM_DOCS = 100;
-@Ignore("This test does not work, as PerDirPayloadProcessor is currently broken (see nocommit in SegmentMerger): "+
-"SegmentReader/AtomicReader does not know its directory. This is broken, it should be a PayLoadProcessorProvider per AtomicReader!")
@Test
public void testTaxonomyMergeUtils() throws Exception {
Directory dir = newDirectory();
@@ -70,7 +67,7 @@ public class FacetsPayloadProcessorProviderTest extends LuceneTestCase {
}
private void verifyResults(Directory dir, Directory taxDir) throws IOException {
-IndexReader reader1 = IndexReader.open(dir);
+DirectoryReader reader1 = DirectoryReader.open(dir);
DirectoryTaxonomyReader taxReader = new DirectoryTaxonomyReader(taxDir);
IndexSearcher searcher = newSearcher(reader1);
FacetSearchParams fsp = new FacetSearchParams();
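
The last hunk reflects the reader-hierarchy split driven by LUCENE-2858: the test now opens a concrete DirectoryReader instead of going through IndexReader.open. A minimal sketch of that usage pattern, assuming dir already holds an index (the helper name and structure are illustrative only):

// Minimal usage sketch; countAllDocs is an illustrative helper, not test code.
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;

final class DirectoryReaderUsage {
  static int countAllDocs(Directory dir) throws Exception {
    DirectoryReader reader = DirectoryReader.open(dir); // concrete, directory-backed composite reader
    try {
      IndexSearcher searcher = new IndexSearcher(reader);
      return searcher.search(new MatchAllDocsQuery(), 10).totalHits;
    } finally {
      reader.close(); // the caller owns and must close the reader
    }
  }
}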