LUCENE-832: ParallelReader fixed to not throw NPE if isCurrent(), isOptimized() or getVersion() is called.
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@561611 13f79535-47bb-0310-9956-ffa450edef68
parent 4b11c35b08
commit 60c6a847b5
@@ -57,6 +57,9 @@ Bug fixes

 8. LUCENE-970: FilterIndexReader now implements isOptimized(). Before
    a call of isOptimized() would throw a NPE. (Michael Busch)

 9. LUCENE-832: ParallelReader fixed to not throw NPE if isCurrent(),
    isOptimized() or getVersion() is called. (Michael Busch)

New features
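For context, a minimal caller-side sketch (not part of this change; the index locations below are placeholders) of what the fixed methods now do on a ParallelReader built from two parallel indexes: isCurrent() and isOptimized() delegate to the subreaders instead of hitting a null field, while getVersion() fails with an explicit UnsupportedOperationException rather than a NullPointerException.

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.ParallelReader;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class ParallelReaderStatusCheck {
      public static void main(String[] args) throws IOException {
        // Placeholder paths; any two parallel indexes with matching docIDs will do.
        Directory dir1 = FSDirectory.getDirectory("/path/to/index1");
        Directory dir2 = FSDirectory.getDirectory("/path/to/index2");

        ParallelReader pr = new ParallelReader();
        pr.add(IndexReader.open(dir1));
        pr.add(IndexReader.open(dir2));

        // Both calls now recurse into the subreaders (see ParallelReader.java below).
        System.out.println("current:   " + pr.isCurrent());
        System.out.println("optimized: " + pr.isOptimized());

        try {
          pr.getVersion();          // still unsupported for ParallelReader
        } catch (UnsupportedOperationException e) {
          System.out.println("getVersion(): " + e.getMessage());
        }

        pr.close();
      }
    }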
@@ -279,6 +279,43 @@ public class ParallelReader extends IndexReader {
    ensureOpen();
    return new ParallelTermPositions();
  }

  /**
   * Checks recursively if all subreaders are up to date.
   */
  public boolean isCurrent() throws CorruptIndexException, IOException {
    for (int i = 0; i < readers.size(); i++) {
      if (!((IndexReader)readers.get(i)).isCurrent()) {
        return false;
      }
    }

    // all subreaders are up to date
    return true;
  }

  /**
   * Checks recursively if all subindexes are optimized
   */
  public boolean isOptimized() {
    for (int i = 0; i < readers.size(); i++) {
      if (!((IndexReader)readers.get(i)).isOptimized()) {
        return false;
      }
    }

    // all subindexes are optimized
    return true;
  }


  /** Not implemented.
   * @throws UnsupportedOperationException
   */
  public long getVersion() {
    throw new UnsupportedOperationException("ParallelReader does not support this method.");
  }


  protected void doCommit() throws IOException {
    for (int i = 0; i < readers.size(); i++)
@@ -450,3 +487,4 @@ public class ParallelReader extends IndexReader {
  }

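Since ParallelReader has no refresh mechanism in this Lucene version, a caller that sees isCurrent() return false has to rebuild the reader from freshly opened subreaders. A possible sketch of that pattern (the helper class and its name are assumptions, not part of this commit):

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.ParallelReader;
    import org.apache.lucene.store.Directory;

    /** Hypothetical helper: rebuild a ParallelReader whose subreaders have gone stale. */
    public class ParallelReaderRefresher {

      /** Returns the reader unchanged if current; otherwise closes it and reopens every part. */
      public static ParallelReader refreshIfStale(ParallelReader old, Directory[] dirs)
          throws IOException {
        if (old.isCurrent()) {
          return old;                             // every subreader still sees the latest commit
        }
        old.close();                              // release the stale subreaders
        ParallelReader fresh = new ParallelReader();
        for (int i = 0; i < dirs.length; i++) {
          fresh.add(IndexReader.open(dirs[i]));   // freshly opened readers are current again
        }
        return fresh;
      }
    }

The test below relies on exactly this property: after a setNorm() call through a separate IndexReader, the subreader on that directory is no longer current, so the ParallelReader reports false until it is rebuilt.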
@@ -120,7 +120,81 @@ public class TestParallelReader extends TestCase {
      // expected exception
    }
  }

  public void testIsCurrent() throws IOException {
    Directory dir1 = getDir1();
    Directory dir2 = getDir1();
    ParallelReader pr = new ParallelReader();
    pr.add(IndexReader.open(dir1));
    pr.add(IndexReader.open(dir2));

    assertTrue(pr.isCurrent());
    IndexReader modifier = IndexReader.open(dir1);
    modifier.setNorm(0, "f1", 100);
    modifier.close();

    // one of the two IndexReaders which ParallelReader is using
    // is not current anymore
    assertFalse(pr.isCurrent());

    modifier = IndexReader.open(dir2);
    modifier.setNorm(0, "f3", 100);
    modifier.close();

    // now both are not current anymore
    assertFalse(pr.isCurrent());
  }

  public void testIsOptimized() throws IOException {
    Directory dir1 = getDir1();
    Directory dir2 = getDir1();

    // add another document to ensure that the indexes are not optimized
    IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer());
    Document d = new Document();
    d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
    modifier.addDocument(d);
    modifier.close();

    modifier = new IndexWriter(dir2, new StandardAnalyzer());
    d = new Document();
    d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.TOKENIZED));
    modifier.addDocument(d);
    modifier.close();


    ParallelReader pr = new ParallelReader();
    pr.add(IndexReader.open(dir1));
    pr.add(IndexReader.open(dir2));
    assertFalse(pr.isOptimized());
    pr.close();

    modifier = new IndexWriter(dir1, new StandardAnalyzer());
    modifier.optimize();
    modifier.close();

    pr = new ParallelReader();
    pr.add(IndexReader.open(dir1));
    pr.add(IndexReader.open(dir2));
    // just one of the two indexes are optimized
    assertFalse(pr.isOptimized());
    pr.close();


    modifier = new IndexWriter(dir2, new StandardAnalyzer());
    modifier.optimize();
    modifier.close();

    pr = new ParallelReader();
    pr.add(IndexReader.open(dir1));
    pr.add(IndexReader.open(dir2));
    // now both indexes are optimized
    assertTrue(pr.isOptimized());
    pr.close();

  }


  private void queryTest(Query query) throws IOException {
    Hits parallelHits = parallel.search(query);
    Hits singleHits = single.search(query);
@@ -136,7 +210,7 @@ public class TestParallelReader extends TestCase {
    }
  }

  // Fiels 1-4 indexed together:
  // Fields 1-4 indexed together:
  private Searcher single() throws IOException {
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true);