diff --git a/CHANGES.txt b/CHANGES.txt
index 75d0942faae..0d53b9f06b0 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -57,6 +57,9 @@ Bug fixes
 
  8. LUCENE-970: FilterIndexReader now implements isOptimized(). Before
     a call of isOptimized() would throw a NPE. (Michael Busch)
+
+ 9. LUCENE-832: ParallelReader fixed to not throw NPE if isCurrent(),
+    isOptimized() or getVersion() is called. (Michael Busch)
 
 New features
 
diff --git a/src/java/org/apache/lucene/index/ParallelReader.java b/src/java/org/apache/lucene/index/ParallelReader.java
index ef424e20ba6..34d3a80247b 100644
--- a/src/java/org/apache/lucene/index/ParallelReader.java
+++ b/src/java/org/apache/lucene/index/ParallelReader.java
@@ -279,6 +279,43 @@ public class ParallelReader extends IndexReader {
     ensureOpen();
     return new ParallelTermPositions();
   }
+
+  /**
+   * Checks recursively if all subreaders are up to date.
+   */
+  public boolean isCurrent() throws CorruptIndexException, IOException {
+    for (int i = 0; i < readers.size(); i++) {
+      if (!((IndexReader)readers.get(i)).isCurrent()) {
+        return false;
+      }
+    }
+
+    // all subreaders are up to date
+    return true;
+  }
+
+  /**
+   * Checks recursively if all subindexes are optimized
+   */
+  public boolean isOptimized() {
+    for (int i = 0; i < readers.size(); i++) {
+      if (!((IndexReader)readers.get(i)).isOptimized()) {
+        return false;
+      }
+    }
+
+    // all subindexes are optimized
+    return true;
+  }
+
+
+  /** Not implemented.
+   * @throws UnsupportedOperationException
+   */
+  public long getVersion() {
+    throw new UnsupportedOperationException("ParallelReader does not support this method.");
+  }
+
   protected void doCommit() throws IOException {
     for (int i = 0; i < readers.size(); i++)
@@ -450,3 +487,4 @@ public class ParallelReader extends IndexReader {
   }
 
 }
+
diff --git a/src/test/org/apache/lucene/index/TestParallelReader.java b/src/test/org/apache/lucene/index/TestParallelReader.java
index 11b46b5acec..39dd154c869 100644
--- a/src/test/org/apache/lucene/index/TestParallelReader.java
+++ b/src/test/org/apache/lucene/index/TestParallelReader.java
@@ -120,7 +120,81 @@ public class TestParallelReader extends TestCase {
       // expected exception
     }
   }
+
+  public void testIsCurrent() throws IOException {
+    Directory dir1 = getDir1();
+    Directory dir2 = getDir1();
+    ParallelReader pr = new ParallelReader();
+    pr.add(IndexReader.open(dir1));
+    pr.add(IndexReader.open(dir2));
+
+    assertTrue(pr.isCurrent());
+    IndexReader modifier = IndexReader.open(dir1);
+    modifier.setNorm(0, "f1", 100);
+    modifier.close();
+
+    // one of the two IndexReaders which ParallelReader is using
+    // is not current anymore
+    assertFalse(pr.isCurrent());
+
+    modifier = IndexReader.open(dir2);
+    modifier.setNorm(0, "f3", 100);
+    modifier.close();
+
+    // now both are not current anymore
+    assertFalse(pr.isCurrent());
+  }
+
+  public void testIsOptimized() throws IOException {
+    Directory dir1 = getDir1();
+    Directory dir2 = getDir1();
+
+    // add another document to ensure that the indexes are not optimized
+    IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer());
+    Document d = new Document();
+    d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
+    modifier.addDocument(d);
+    modifier.close();
+
+    modifier = new IndexWriter(dir2, new StandardAnalyzer());
+    d = new Document();
+    d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.TOKENIZED));
+    modifier.addDocument(d);
+    modifier.close();
+
+
+    ParallelReader pr = new ParallelReader();
+    pr.add(IndexReader.open(dir1));
+    pr.add(IndexReader.open(dir2));
+    assertFalse(pr.isOptimized());
+    pr.close();
+
+    modifier = new IndexWriter(dir1, new StandardAnalyzer());
+    modifier.optimize();
+    modifier.close();
+
+    pr = new ParallelReader();
+    pr.add(IndexReader.open(dir1));
+    pr.add(IndexReader.open(dir2));
+    // just one of the two indexes is optimized
+    assertFalse(pr.isOptimized());
+    pr.close();
+
+
+    modifier = new IndexWriter(dir2, new StandardAnalyzer());
+    modifier.optimize();
+    modifier.close();
+
+    pr = new ParallelReader();
+    pr.add(IndexReader.open(dir1));
+    pr.add(IndexReader.open(dir2));
+    // now both indexes are optimized
+    assertTrue(pr.isOptimized());
+    pr.close();
+
+  }
+
+
   private void queryTest(Query query) throws IOException {
     Hits parallelHits = parallel.search(query);
     Hits singleHits = single.search(query);
@@ -136,7 +210,7 @@ public class TestParallelReader extends TestCase {
     }
   }
 
-  // Fiels 1-4 indexed together:
+  // Fields 1-4 indexed together:
   private Searcher single() throws IOException {
     Directory dir = new MockRAMDirectory();
     IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true);
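
For reference only (not part of the patch): a minimal, hypothetical sketch of how the new
ParallelReader status methods behave once this change is applied. It assumes the Lucene
2.x-era API already used in the tests above (IndexReader.open(Directory),
Field.Index.TOKENIZED, IndexWriter.optimize()); the class name, field names, and values
are illustrative only.

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.ParallelReader;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    // Hedged sketch, not from the patch: exercises isCurrent(), isOptimized(), getVersion().
    public class ParallelReaderStatusSketch {

      public static void main(String[] args) throws Exception {
        Directory dir1 = new RAMDirectory();
        Directory dir2 = new RAMDirectory();
        addDoc(dir1, "f1", "v1");   // illustrative field/value names
        addDoc(dir2, "f2", "v2");

        ParallelReader pr = new ParallelReader();
        pr.add(IndexReader.open(dir1));
        pr.add(IndexReader.open(dir2));

        // With this patch these return a real answer instead of throwing NPE:
        // true only if every subreader is current / every subindex is optimized.
        System.out.println("isCurrent:   " + pr.isCurrent());
        System.out.println("isOptimized: " + pr.isOptimized());

        try {
          pr.getVersion();   // per the patch, not supported across parallel indexes
        } catch (UnsupportedOperationException e) {
          System.out.println("getVersion: " + e.getMessage());
        }
        pr.close();
      }

      // Helper: build a one-document index in the given directory.
      private static void addDoc(Directory dir, String field, String value) throws Exception {
        IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true);
        Document d = new Document();
        d.add(new Field(field, value, Field.Store.YES, Field.Index.TOKENIZED));
        w.addDocument(d);
        w.close();
      }
    }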