Merge remote-tracking branch 'origin/master'

This commit is contained in:
Noble Paul 2017-01-31 23:10:33 +10:30
commit d9edc0b741
4 changed files with 91 additions and 4 deletions

View File

@@ -108,6 +108,9 @@ Bug Fixes
* LUCENE-7630: Fix (Edge)NGramTokenFilter to no longer drop payloads
and preserve all attributes. (Nathan Gass via Uwe Schindler)
* LUCENE-7670: AnalyzingInfixSuggester should not immediately open an
IndexWriter over an already-built index. (Steve Rowe)
Improvements
* LUCENE-7055: Added Weight#scorerSupplier, which allows to estimate the cost

View File

@@ -249,9 +249,7 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
if (DirectoryReader.indexExists(dir)) {
// Already built; open it:
writer = new IndexWriter(dir,
getIndexWriterConfig(getGramAnalyzer(), IndexWriterConfig.OpenMode.APPEND));
searcherMgr = new SearcherManager(writer, null);
searcherMgr = new SearcherManager(dir, null);
}
}

View File

@@ -1360,7 +1360,8 @@ public class AnalyzingInfixSuggesterTest extends LuceneTestCase {
// * SearcherManager's IndexWriter reference should be closed
// (as evidenced by maybeRefreshBlocking() throwing AlreadyClosedException)
Analyzer a = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
MyAnalyzingInfixSuggester suggester = new MyAnalyzingInfixSuggester(newDirectory(), a, a, 3, false,
Path tempDir = createTempDir("analyzingInfixContext");
final MyAnalyzingInfixSuggester suggester = new MyAnalyzingInfixSuggester(newFSDirectory(tempDir), a, a, 3, false,
AnalyzingInfixSuggester.DEFAULT_ALL_TERMS_REQUIRED, AnalyzingInfixSuggester.DEFAULT_HIGHLIGHT, true);
suggester.build(new InputArrayIterator(sharedInputs));
assertNull(suggester.getIndexWriter());
@@ -1368,6 +1369,16 @@ public class AnalyzingInfixSuggesterTest extends LuceneTestCase {
expectThrows(AlreadyClosedException.class, () -> suggester.getSearcherManager().maybeRefreshBlocking());
suggester.close();
// After instantiating from an already-built suggester dir:
// * The IndexWriter should be null
// * The SearcherManager should be non-null
final MyAnalyzingInfixSuggester suggester2 = new MyAnalyzingInfixSuggester(newFSDirectory(tempDir), a, a, 3, false,
AnalyzingInfixSuggester.DEFAULT_ALL_TERMS_REQUIRED, AnalyzingInfixSuggester.DEFAULT_HIGHLIGHT, true);
assertNull(suggester2.getIndexWriter());
assertNotNull(suggester2.getSearcherManager());
suggester2.close();
a.close();
}

View File

@@ -42,12 +42,20 @@ import org.apache.solr.update.AddUpdateCommand;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.update.UpdateHandler;
import org.apache.solr.util.ReadOnlyCoresLocator;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestLazyCores extends SolrTestCaseJ4 {
private File solrHomeDirectory;
// Runs once before any test in this class.
@BeforeClass
public static void setupClass() throws Exception {
  // Several tests in this suite close a core after indexing documents and then
  // re-open it, expecting those documents to still be searchable; that only
  // works when the index is persisted on disk rather than held in RAM.
  final String diskBackedFactory = "solr.StandardDirectoryFactory";
  useFactory(diskBackedFactory);
}
private static CoreDescriptor makeCoreDescriptor(CoreContainer cc, String coreName, String isTransient, String loadOnStartup) {
return new CoreDescriptor(cc, coreName, cc.getCoreRootDirectory().resolve(coreName),
CoreDescriptor.CORE_TRANSIENT, isTransient,
@@ -721,4 +729,71 @@ public class TestLazyCores extends SolrTestCaseJ4 {
}
}
// Insure that when a core is aged out of the transient cache, any uncommitted docs are preserved.
// Note, this needs FS-based indexes to persist!
// Cores 2, 3, 6, 7, 8, 9 are transient
@Test
public void testNoCommit() throws Exception {
// Boot a CoreContainer configured with the transient cores under test.
CoreContainer cc = init();
// All six cores are transient, so the transient cache can age them out once
// its size limit is exceeded (presumably the limit is < 6 — the eviction
// assertions below rely on collection2/collection3 going first; verify
// against init()'s configuration).
String[] coreList = new String[]{
"collection2",
"collection3",
"collection6",
"collection7",
"collection8",
"collection9"
};
try {
// First, go through all the transient cores and add some docs. DO NOT COMMIT!
// The aged-out core should commit the docs when it gets closed.
List<SolrCore> openCores = new ArrayList<>();
for (String coreName : coreList) {
SolrCore core = cc.getCore(coreName);
openCores.add(core);
add10(core);
}
// Just proving that some cores have been aged out.
checkNotInCores(cc, "collection2", "collection3");
// Close our get of all cores above.
for (SolrCore core : openCores) core.close();
openCores.clear();
// We still should have 6, 7, 8, 9 loaded, their reference counts have NOT dropped to zero
checkInCores(cc, "collection6", "collection7", "collection8", "collection9");
for (String coreName : coreList) {
// The point of this test is to insure that when cores are aged out and re-opened
// that the docs are there, so insure that the core we're testing is gone, gone, gone.
checkNotInCores(cc, coreName);
// Load the core up again.
SolrCore core = cc.getCore(coreName);
openCores.add(core);
// Insure docs are still there.
check10(core);
}
// NOTE(review): this close loop is not inside a finally, so a failing
// check10() above would leak the re-opened cores until cc.shutdown().
for (SolrCore core : openCores) core.close();
} finally {
cc.shutdown();
}
}
// Indexes 10 documents (ids "00".."09") into the given core WITHOUT committing;
// the test relies on the transient-cache close to flush them to disk.
private void add10(SolrCore core) throws IOException {
  for (int idx = 0; idx < 10; ++idx) {
    addLazy(core, "id", "0" + idx);
  }
  // NOTE(review): removed a dead statement here — the original created a
  // `SolrQueryRequest req = makeReq(core);` that was never used or closed.
}
// Asserts that the 10 documents written by add10() are visible to a search
// against the given core (i.e. they survived the core's close/re-open cycle).
private void check10(SolrCore core) {
  // A match-all query suffices: only the hit count matters here.
  assertQ("test closing core without committing",
      makeReq(core, "q", "*:*"),
      "//result[@numFound='10']");
}
}